improve compound dtype handling, regenerate and add 2.7.0

sneakers-the-rat 2024-07-03 23:36:20 -07:00
parent ec81032ae8
commit 4211b6058b
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
215 changed files with 23525 additions and 2632 deletions
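The headline change, "improve compound dtype handling", shows up below as new nested classes: a dataset or attribute with a compound (struct-like) dtype, previously typed as a bare Any, now gets its own pydantic model with one field per dtype component (see the new ElectrodeGroupPosition and TimeIntervalsTimeseries classes in the ecephys and epoch diffs). A minimal stand-in sketch of the resulting shape, mirroring the fields from the ecephys diff; this is not the generator's own code:

from typing import Optional

from pydantic import BaseModel, Field


class ElectrodeGroupPosition(BaseModel):
    """stereotaxic or common framework coordinates"""

    x: Optional[float] = Field(None, description="x coordinate")
    y: Optional[float] = Field(None, description="y coordinate")
    z: Optional[float] = Field(None, description="z coordinate")


# Hypothetical usage: a compound scalar validates into a typed model
# instead of passing through as an untyped Any.
pos = ElectrodeGroupPosition(x=1.5, y=-2.0, z=0.25)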


@ -1 +1 @@
from .pydantic.core.v2_6_0_alpha.namespace import *
from .pydantic.core.v2_7_0.namespace import *


@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,6 +55,7 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_0.hdmf_common_table import Container, Data, DynamicTable
metamodel_version = "None"
version = "2.2.0"
@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +112,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -222,7 +254,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)
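Also recurring in the regenerated files: groups whose name is fixed by the NWB schema (Images here; FeatureExtraction, EventDetection, ClusterWaveforms, Clustering, Units, and ImagingRetinotopy below) now carry that name as a field default instead of requiring it at construction. A minimal stand-in showing the effect with plain pydantic defaulting; this is not the generated class:

from pydantic import BaseModel, Field


class Images(BaseModel):
    name: str = Field("Images")  # default name taken from the NWB schema


print(Images().name)               # -> "Images"
print(Images(name="stills").name)  # an explicit name still overrides the default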


@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.2.0"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)


@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.2.0"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
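This annotation change repeats across every regenerated module: numpy is imported only under TYPE_CHECKING, so the __getitem__ return type is written as the string "np.ndarray" (a forward reference) rather than a name that would not exist at runtime. A self-contained sketch of the pattern, using a hypothetical ArrayHolder class as a stand-in for the generated ConfiguredBaseModel:

from __future__ import annotations

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    import numpy as np  # only needed by type checkers, never at runtime


class ArrayHolder:
    def __init__(self, array: Optional["np.ndarray"] = None) -> None:
        self.array = array

    def __getitem__(self, i: slice | int) -> "np.ndarray":
        # delegate indexing to the wrapped array, as the generated models do
        if self.array is None:
            raise KeyError(f"Could not get item {i}: no array is set")
        return self.array[i]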


@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.2.0"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -156,7 +189,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -185,14 +220,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -226,7 +263,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -246,7 +283,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -256,7 +295,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -266,7 +307,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -277,23 +320,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -313,7 +370,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -343,5 +400,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()


@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_0.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.2.0"
@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -122,6 +156,29 @@ class TimeIntervalsTagsIndex(VectorIndex):
)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -139,4 +196,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()
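On the TimeIntervals table itself, the compound column is no longer flattened into an untyped list; it becomes a named reference plus the dedicated TimeIntervalsTimeseries column model above. A schematic before/after, simplified to the two relevant fields and using hypothetical stand-in classes rather than the generated ones:

from typing import Any, List, Optional

from pydantic import BaseModel, Field


class TimeIntervalsBefore(BaseModel):
    # previous generation: the compound dtype collapsed to an untyped list
    timeseries: Optional[List[Any]] = Field(
        default_factory=list, description="An index into a TimeSeries object."
    )


class TimeIntervalsAfter(BaseModel):
    # this commit: the column is referenced by name, and its structure
    # (idx_start, count, timeseries) lives in TimeIntervalsTimeseries
    timeseries: Optional[str] = Field(None, description="An index into a TimeSeries object.")
    timeseries_index: Optional[str] = Field(None, description="Index for timeseries.")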


@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,14 +53,28 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_1_0.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
metamodel_version = "None"
version = "2.2.0"
@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""


@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_0.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_1_0.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.2.0"
@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)


@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.2.0"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -280,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -317,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",


@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()


@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_0.hdmf_common_table import (
VectorData,
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
)
from .core_nwb_base import TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
metamodel_version = "None"
version = "2.2.0"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
bands: str = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
class DecompositionSeriesBands(DynamicTable):
@ -306,18 +349,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -337,7 +384,9 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",


@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.2.0"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",


@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,18 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
TimeSeriesSync,
NWBDataInterface,
TimeSeries,
NWBContainer,
)
from .core_nwb_image import ImageSeries
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion
metamodel_version = "None"
version = "2.2.0"
@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -200,7 +236,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -210,7 +248,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -220,9 +260,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = (
Field(default_factory=dict)
)
children: Optional[
List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]
] = Field(default_factory=dict)
name: str = Field(...)
@ -233,9 +273,15 @@ class ImagingPlane(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
description: Optional[str] = Field(
None, description="""Description of the imaging plane."""
)
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
imaging_rate: float = Field(
..., description="""Rate that images are acquired, in Hz."""
)
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@ -320,8 +366,12 @@ class OpticalChannel(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
description: str = Field(
..., description="""Description or other notes about the channel."""
)
emission_lambda: float = Field(
..., description="""Emission wavelength for channel, in nm."""
)
class MotionCorrection(NWBDataInterface):
@ -330,7 +380,9 @@ class MotionCorrection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(
default_factory=dict
)
name: str = Field(...)


@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,8 +54,10 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBData, NWBDataInterface
from .core_nwb_image import GrayscaleImage
metamodel_version = "None"
version = "2.2.0"
@ -46,7 +76,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +106,9 @@ class RetinotopyMap(NWBData):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -95,7 +127,9 @@ class AxisMap(RetinotopyMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class RetinotopyImage(GrayscaleImage):
@ -113,14 +147,18 @@ class RetinotopyImage(GrayscaleImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -136,7 +174,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -183,7 +221,9 @@ class ImagingRetinotopyAxis1PhaseMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis1PowerMap(AxisMap):
@ -201,7 +241,9 @@ class ImagingRetinotopyAxis1PowerMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis2PhaseMap(AxisMap):
@ -219,7 +261,9 @@ class ImagingRetinotopyAxis2PhaseMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis2PowerMap(AxisMap):
@ -237,7 +281,9 @@ class ImagingRetinotopyAxis2PowerMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopySignMap(RetinotopyMap):
@ -251,7 +297,9 @@ class ImagingRetinotopySignMap(RetinotopyMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -262,7 +310,9 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["focal_depth_image"] = Field("focal_depth_image")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
@ -271,14 +321,18 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -303,14 +357,18 @@ class ImagingRetinotopyVasculatureImage(RetinotopyImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],

View file

@ -1,23 +1,220 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_0.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_1_0.hdmf_common_table import (
Data,
Index,
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
Container,
DynamicTable,
)
from .core_nwb_retinotopy import (
RetinotopyMap,
AxisMap,
RetinotopyImage,
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopySignMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
ImagingPlane,
ImagingPlaneManifold,
ImagingPlaneOriginCoords,
ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
)
from .core_nwb_device import Device
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
)
from .core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
Subject,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.2.0"
@ -36,7 +233,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,6 +55,7 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_2.hdmf_common_table import Container, Data, DynamicTable
metamodel_version = "None"
version = "2.2.1"
@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +112,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -222,7 +254,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)
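
Hunks like this give `name` a fixed default ("Images" here; "FeatureExtraction", "EventDetection", "ClusterWaveforms", "Clustering", and "Units" get the same treatment further down) for NWB types whose group name is fixed by the schema. A rough sketch of the effect on construction, using a stand-in model rather than the generated class:

from typing import Optional

from pydantic import BaseModel, Field


class Images(BaseModel):
    # fixed default: callers no longer need to pass the group name explicitly
    name: str = Field("Images")
    description: Optional[str] = Field(
        None, description="Description of this collection of images."
    )


images = Images(description="trial-averaged frames")
print(images.name)  # -> "Images"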

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.2.1"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.2.1"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.2.1"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -156,7 +189,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -185,14 +220,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -226,7 +263,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -246,7 +283,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -256,7 +295,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -266,7 +307,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -277,23 +320,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
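
This is one of the compound-dtype improvements the commit message refers to: `ElectrodeGroup.position`, previously typed as a bare `Any`, now has a dedicated `ElectrodeGroupPosition` model whose x/y/z slots mirror the (x, y, z) compound dtype in the NWB schema. A hedged sketch of unpacking such a value; the stand-in class and the `row` tuple are illustrative, not from the generated package:

from typing import Optional

from pydantic import BaseModel, Field


class ElectrodeGroupPosition(BaseModel):
    # stand-in mirroring the generated model above
    x: Optional[float] = Field(None, description="x coordinate")
    y: Optional[float] = Field(None, description="y coordinate")
    z: Optional[float] = Field(None, description="z coordinate")


# a compound (x, y, z) scalar as it might be read from an HDF5 dataset
row = (1.5, -0.25, 0.0)
position = ElectrodeGroupPosition(x=row[0], y=row[1], z=row[2])
print(position)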
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -313,7 +370,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -343,5 +400,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_2.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.2.1"
@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -122,6 +156,29 @@ class TimeIntervalsTagsIndex(VectorIndex):
)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
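
`TimeIntervalsTimeseries` is another compound dtype made explicit: the per-row (idx_start, count, timeseries) triple of the epochs table's timeseries column gets its own class instead of the earlier untyped `List[Any]` field. A rough usage sketch under the same caveat as above (stand-in class, illustrative values):

from typing import Optional

from pydantic import BaseModel, Field


class TimeIntervalsTimeseries(BaseModel):
    # stand-in for the generated compound-dtype column model
    idx_start: Optional[int] = Field(
        None, description="Start index into the referenced TimeSeries data."
    )
    count: Optional[int] = Field(
        None, description="Number of samples within this epoch."
    )
    timeseries: Optional[str] = Field(
        None, description="Reference to the TimeSeries this row points at."
    )


# one epoch covering samples [100, 350) of a series named "running_speed"
ref = TimeIntervalsTimeseries(idx_start=100, count=250, timeseries="running_speed")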
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -139,4 +196,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,14 +53,28 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_1_2.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
metamodel_version = "None"
version = "2.2.1"
@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_2.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_1_2.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.2.1"
@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.2.1"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -280,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -317,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",

View file

@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()
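
The abstract `Arraylike` helper is removed from this module; the dims/shape/dtype information it used to carry is expressed directly through the `NDArray[Shape[...], dtype]` annotations used throughout the regenerated models. A small illustration of that annotation style, assuming nptyping 2.x semantics (the alias name is made up for the example):

import numpy as np
from nptyping import Float, NDArray, Shape

# any 2-D float array, with labeled wildcard dimensions as in the generated models
GrayscaleArray = NDArray[Shape["* x, * y"], Float]

frame = np.zeros((480, 640))
print(isinstance(frame, GrayscaleArray))          # True: 2-D float array
print(isinstance(frame.ravel(), GrayscaleArray))  # False: wrong number of dimensions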

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_2.hdmf_common_table import (
VectorData,
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
)
from .core_nwb_base import TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
metamodel_version = "None"
version = "2.2.1"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
bands: str = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
class DecompositionSeriesBands(DynamicTable):
@ -306,18 +349,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -337,7 +384,9 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",

View file

@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.2.1"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,18 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
TimeSeriesSync,
NWBDataInterface,
TimeSeries,
NWBContainer,
)
from .core_nwb_image import ImageSeries
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion
metamodel_version = "None"
version = "2.2.1"
@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -200,7 +236,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -210,7 +248,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -220,9 +260,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = (
Field(default_factory=dict)
)
children: Optional[
List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]
] = Field(default_factory=dict)
name: str = Field(...)
@ -233,9 +273,15 @@ class ImagingPlane(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
description: Optional[str] = Field(
None, description="""Description of the imaging plane."""
)
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
imaging_rate: float = Field(
..., description="""Rate that images are acquired, in Hz."""
)
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@ -320,8 +366,12 @@ class OpticalChannel(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
description: str = Field(
..., description="""Description or other notes about the channel."""
)
emission_lambda: float = Field(
..., description="""Emission wavelength for channel, in nm."""
)
class MotionCorrection(NWBDataInterface):
@ -330,7 +380,9 @@ class MotionCorrection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,8 +54,10 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBData, NWBDataInterface
from .core_nwb_image import GrayscaleImage
metamodel_version = "None"
version = "2.2.1"
@ -46,7 +76,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +106,9 @@ class RetinotopyMap(NWBData):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -95,7 +127,9 @@ class AxisMap(RetinotopyMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class RetinotopyImage(GrayscaleImage):
@ -113,14 +147,18 @@ class RetinotopyImage(GrayscaleImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -136,7 +174,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -183,7 +221,9 @@ class ImagingRetinotopyAxis1PhaseMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis1PowerMap(AxisMap):
@ -201,7 +241,9 @@ class ImagingRetinotopyAxis1PowerMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis2PhaseMap(AxisMap):
@ -219,7 +261,9 @@ class ImagingRetinotopyAxis2PhaseMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopyAxis2PowerMap(AxisMap):
@ -237,7 +281,9 @@ class ImagingRetinotopyAxis2PowerMap(AxisMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
class ImagingRetinotopySignMap(RetinotopyMap):
@ -251,7 +297,9 @@ class ImagingRetinotopySignMap(RetinotopyMap):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -262,7 +310,9 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["focal_depth_image"] = Field("focal_depth_image")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
@ -271,14 +321,18 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -303,14 +357,18 @@ class ImagingRetinotopyVasculatureImage(RetinotopyImage):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],

View file

@ -1,23 +1,220 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_2.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_1_2.hdmf_common_table import (
Data,
Index,
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
Container,
DynamicTable,
)
from .core_nwb_retinotopy import (
RetinotopyMap,
AxisMap,
RetinotopyImage,
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopySignMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
ImagingPlane,
ImagingPlaneManifold,
ImagingPlaneOriginCoords,
ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
)
from .core_nwb_device import Device
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
)
from .core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
Subject,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.2.1"
@ -36,7 +233,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,6 +55,7 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
metamodel_version = "None"
version = "2.2.2"
@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +112,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -222,7 +254,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.2.2"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.2.2"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.2.2"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -282,7 +323,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
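Hypothetical usage sketch, not generated output: the compound position dtype is now its own model with scalar x/y/z slots rather than an untyped Any field, e.g.:

pos = ElectrodeGroupPosition(x=1.2, y=-0.4, z=0.0)
print(pos.name, pos.x, pos.y, pos.z)  # position 1.2 -0.4 0.0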
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -329,7 +386,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -359,5 +416,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.2.2"
@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
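Hypothetical sketch, not generated output: each row of the new typed timeseries column can carry an idx_start/count window into a referenced TimeSeries instead of a bare Any value (all values below are made up, and the reference slot is currently typed as a plain str in this model):

ref = TimeIntervalsTimeseries(
    idx_start=0,                  # first sample covered by the interval
    count=100,                    # number of samples in the interval
    timeseries="running_speed",   # name of the referenced TimeSeries
)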
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,14 +53,28 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
metamodel_version = "None"
version = "2.2.2"
@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.2.2"
@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.2.2"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries):
data: Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
] = Field(
..., description="""Images presented to subject, either grayscale or RGB"""
)
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",

View file

@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()
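As a rough illustration of the convention described in the Arraylike docstring above, a subclass maps each possible dimension to a slot. The sketch below uses hypothetical class and slot names (they are not taken from the generated models) and assumes pydantic v2:

# --- illustrative sketch (hypothetical names, pydantic v2 assumed) ---
from typing import Optional

from pydantic import BaseModel


class Arraylike(BaseModel):
    """Abstract marker: each slot on a subclass names one possible dimension."""


class ImageArray(Arraylike):
    # 'x' and 'y' appear in every dimension specifier of the original schema,
    # so they are required; 'r_g_b' only appears in an RGB variant, so it stays optional.
    x: float
    y: float
    r_g_b: Optional[float] = None
# --- end sketch ---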

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
)
from .core_nwb_base import TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
metamodel_version = "None"
version = "2.2.2"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
bands: str = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
class DecompositionSeriesBands(DynamicTable):
@ -306,18 +349,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -337,7 +384,9 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",

View file

@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.2.2"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,18 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
TimeSeriesSync,
NWBDataInterface,
TimeSeries,
NWBContainer,
)
from .core_nwb_image import ImageSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion
metamodel_version = "None"
version = "2.2.2"
@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -208,7 +244,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -218,7 +256,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -228,9 +268,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = (
Field(default_factory=dict)
)
children: Optional[
List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]
] = Field(default_factory=dict)
name: str = Field(...)
@ -250,7 +290,9 @@ class MotionCorrection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict)
children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,20 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,6 +55,7 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBDataInterface
metamodel_version = "None"
version = "2.2.2"
@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)

View file

@ -1,23 +1,213 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
Data,
Index,
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
Container,
DynamicTable,
)
from .core_nwb_retinotopy import (
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopySignMap,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
ImagingPlane,
MotionCorrection,
)
from .core_nwb_device import Device
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
)
from .core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
Subject,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.2.2"
@ -36,7 +226,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,6 +55,7 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
metamodel_version = "None"
version = "2.2.4"
@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +112,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -222,7 +254,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.2.4"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.2.4"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.2.4"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -282,7 +323,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -329,7 +386,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -359,5 +416,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.2.4"
@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()
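TimeIntervalsTimeseries above gives the same treatment to the compound (idx_start, count, timeseries) dtype that indexes a TimeSeries from an epoch table, replacing the earlier Optional[List[Any] | Any] field on TimeIntervals. A brief sketch with a simplified stand-in class (pydantic v2 assumed; the timeseries member is a string stand-in for what is an object reference in NWB):

# --- illustrative sketch (simplified stand-in, pydantic v2 assumed) ---
from typing import Optional

from pydantic import BaseModel, Field


class TimeIntervalsTimeseries(BaseModel):
    """One optional field per member of the compound (idx_start, count, timeseries) dtype."""

    name: str = "timeseries"
    idx_start: Optional[int] = Field(None, description="start index into the referenced TimeSeries data")
    count: Optional[int] = Field(None, description="number of samples available during this epoch")
    timeseries: Optional[str] = Field(None, description="stand-in for the referenced TimeSeries")


row = TimeIntervalsTimeseries(idx_start=0, count=100, timeseries="raw_acquisition")
print(row.model_dump())
# --- end sketch ---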

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,21 +53,35 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
)
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import (
NWBContainer,
NWBData,
NWBDataInterface,
ProcessingModule,
TimeSeries,
ProcessingModule,
NWBDataInterface,
)
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_ogen import OptogeneticStimulusSite
metamodel_version = "None"
version = "2.2.4"
@ -60,7 +100,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -201,7 +241,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -214,7 +256,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -253,9 +297,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.2.4"
@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.2.4"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries):
data: Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
] = Field(
..., description="""Images presented to subject, either grayscale or RGB"""
)
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",

View file

@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
)
from .core_nwb_base import TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
metamodel_version = "None"
version = "2.2.4"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
bands: str = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
class DecompositionSeriesBands(DynamicTable):
@ -306,18 +349,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -337,7 +384,9 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",

View file

@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.2.4"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,17 +53,23 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
DynamicTable,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
NWBDataInterface,
TimeSeries,
NWBDataInterface,
TimeSeriesSync,
)
from .core_nwb_image import ImageSeries
metamodel_version = "None"
version = "2.2.4"
@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(
default_factory=dict
)
name: str = Field(...)
@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""")
pixel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
pixel_mask_index: Optional[str] = Field(
None, description="""Index into pixel_mask."""
)
pixel_mask: Optional[str] = Field(
None,
description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""")
voxel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
voxel_mask_index: Optional[str] = Field(
None, description="""Index into voxel_mask."""
)
voxel_mask: Optional[str] = Field(
None,
description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
reference_images: Optional[List[ImageSeries] | ImageSeries] = Field(
@ -300,6 +347,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class PlaneSegmentationPixelMask(VectorData):
"""
Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["pixel_mask"] = Field("pixel_mask")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class PlaneSegmentationVoxelMaskIndex(VectorIndex):
"""
Index into voxel_mask.
@ -314,13 +384,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class PlaneSegmentationVoxelMask(VectorData):
"""
Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["voxel_mask"] = Field("voxel_mask")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ImagingPlane(NWBContainer):
"""
An imaging plane and its metadata.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(
default_factory=dict
)
name: str = Field(...)
@ -331,8 +427,12 @@ class OpticalChannel(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
description: str = Field(
..., description="""Description or other notes about the channel."""
)
emission_lambda: float = Field(
..., description="""Emission wavelength for channel, in nm."""
)
class MotionCorrection(NWBDataInterface):
@ -374,7 +474,9 @@ Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
PlaneSegmentation.model_rebuild()
PlaneSegmentationPixelMaskIndex.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMaskIndex.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
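
The new pixel_mask and voxel_mask columns above illustrate the improved compound dtype handling: instead of the earlier List[Any] typing, each compound column is generated as a VectorData subclass with one scalar field per member of the compound dtype (x, y, weight for pixel masks). A minimal sketch of that pattern, using a hypothetical standalone PixelMaskSketch model rather than the generated PlaneSegmentationPixelMask so it runs without the nwb_linkml package:

from typing import Literal, Optional

from pydantic import BaseModel


class PixelMaskSketch(BaseModel):
    """Hypothetical stand-in for the generated PlaneSegmentationPixelMask."""

    name: Literal["pixel_mask"] = "pixel_mask"
    x: Optional[int] = None  # pixel x-coordinate
    y: Optional[int] = None  # pixel y-coordinate
    weight: Optional[float] = None  # weight of the pixel


mask = PixelMaskSketch(x=3, y=7, weight=0.5)
print(mask.model_dump())  # {'name': 'pixel_mask', 'x': 3, 'y': 7, 'weight': 0.5}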

View file

@ -1,20 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,6 +55,7 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBDataInterface
metamodel_version = "None"
version = "2.2.4"
@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)

View file

@ -1,23 +1,222 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
Data,
Index,
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
Container,
DynamicTable,
)
from .core_nwb_retinotopy import (
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopySignMap,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
PlaneSegmentation,
PlaneSegmentationPixelMaskIndex,
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMaskIndex,
PlaneSegmentationVoxelMask,
ImagingPlane,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
)
from .core_nwb_device import Device
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
)
from .core_nwb_file import (
ScratchData,
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
LabMetaData,
Subject,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.2.4"
@ -36,7 +235,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
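
The __getitem__ passthrough on ConfiguredBaseModel, shown truncated in the hunk above, forwards indexing to the model's array field when one is present. A minimal sketch of the idea; ArrayModelSketch and the KeyError in the fallback branch are illustrative assumptions, not part of the generated code:

from typing import Optional, Union

import numpy as np
from pydantic import BaseModel, ConfigDict


class ArrayModelSketch(BaseModel):
    """Hypothetical model demonstrating the array passthrough."""

    model_config = ConfigDict(arbitrary_types_allowed=True)
    array: Optional[np.ndarray] = None

    def __getitem__(self, i: Union[slice, int]) -> np.ndarray:
        # Forward indexing to the wrapped array, as ConfiguredBaseModel does.
        if self.array is not None:
            return self.array[i]
        raise KeyError(f"No array to index with {i}")


model = ArrayModelSketch(array=np.arange(5))
print(model[1:3])  # [1 2]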

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,6 +55,7 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable
metamodel_version = "None"
version = "2.2.5"
@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +112,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -222,7 +254,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.2.5"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.2.5"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.2.5"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -282,7 +323,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -329,7 +386,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -359,5 +416,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()
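
Several types in this module (FeatureExtraction, EventDetection, ClusterWaveforms, Clustering) now declare their fixed NWB name as a default value rather than a required field. A sketch of what that change means for instantiation, using a hypothetical ClusteringSketch model reduced to the two fields relevant here:

from pydantic import BaseModel, Field


class ClusteringSketch(BaseModel):
    """Hypothetical model showing the default-name convention."""

    name: str = Field("Clustering")
    description: str = Field(...)


# name no longer needs to be supplied; only genuinely required fields remain.
clustering = ClusteringSketch(description="cluster 0 is noise, cluster 1 curated")
print(clustering.name)  # Clustering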

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.2.5"
@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()
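
TimeIntervalsTimeseries above replaces the earlier untyped List[Any] column with a typed model whose idx_start and count fields describe a window into the referenced TimeSeries. A sketch of that windowing semantics, assuming a hypothetical TimeseriesRefSketch model and a plain NumPy array standing in for the referenced data:

from typing import Optional

import numpy as np
from pydantic import BaseModel


class TimeseriesRefSketch(BaseModel):
    """Hypothetical stand-in for TimeIntervalsTimeseries."""

    idx_start: Optional[int] = None  # start index into the referenced data
    count: Optional[int] = None  # number of samples covered by the epoch

    def window(self, data: np.ndarray) -> np.ndarray:
        # Slice out the samples this epoch covers along the time axis.
        return data[self.idx_start : self.idx_start + self.count]


ref = TimeseriesRefSketch(idx_start=2, count=3)
print(ref.window(np.arange(10)))  # [2 3 4]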

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,21 +53,35 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
)
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import (
NWBContainer,
NWBData,
NWBDataInterface,
ProcessingModule,
TimeSeries,
ProcessingModule,
NWBDataInterface,
)
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_ogen import OptogeneticStimulusSite
metamodel_version = "None"
version = "2.2.5"
@ -60,7 +100,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -201,7 +241,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -214,7 +256,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -253,9 +297,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.2.5"
@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.2.5"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries):
data: Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
] = Field(
..., description="""Images presented to subject, either grayscale or RGB"""
)
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",

View file

@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
    Container for arraylike information held in the dims, shape, and dtype properties. This is a special case to be interpreted by downstream i/o. This class has no slots and is abstract by default. - Each slot within a subclass indicates a possible dimension. - Only dimensions that are present in all the dimension specifiers in the original schema are required. - Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorData,
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
)
from .core_nwb_base import TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
metamodel_version = "None"
version = "2.2.5"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
bands: str = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
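
# A minimal sketch (illustrative helper, not generated) of filling the shaped
# array slot above, assuming nwb_linkml's NDArray accepts a matching numpy array:
# the Shape expression documents a (num_times, num_channels, num_bands) layout.
def _example_decomposition_series_data() -> DecompositionSeriesData:
    import numpy as np  # runtime import; numpy is only imported under TYPE_CHECKING above

    return DecompositionSeriesData(
        unit="no unit",
        array=np.zeros((10, 4, 3)),  # 10 times x 4 channels x 3 bands
    )
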
class DecompositionSeriesBands(DynamicTable):
@ -306,18 +349,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -337,7 +384,9 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",

View file

@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.2.5"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,17 +53,23 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
DynamicTable,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
NWBDataInterface,
TimeSeries,
NWBDataInterface,
TimeSeriesSync,
)
from .core_nwb_image import ImageSeries
metamodel_version = "None"
version = "2.2.5"
@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(
default_factory=dict
)
name: str = Field(...)
@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""")
pixel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
pixel_mask_index: Optional[str] = Field(
None, description="""Index into pixel_mask."""
)
pixel_mask: Optional[str] = Field(
None,
description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""")
voxel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
voxel_mask_index: Optional[str] = Field(
None, description="""Index into voxel_mask."""
)
voxel_mask: Optional[str] = Field(
None,
description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
reference_images: Optional[List[ImageSeries] | ImageSeries] = Field(
@ -300,6 +347,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class PlaneSegmentationPixelMask(VectorData):
"""
Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["pixel_mask"] = Field("pixel_mask")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
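
# A minimal usage sketch (illustrative helper, not emitted by the generator):
# one row of the compound pixel_mask column, i.e. a single weighted pixel of an ROI.
def _example_pixel_mask_row() -> PlaneSegmentationPixelMask:
    return PlaneSegmentationPixelMask(x=12, y=34, weight=0.8)
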
class PlaneSegmentationVoxelMaskIndex(VectorIndex):
"""
Index into voxel_mask.
@ -314,13 +384,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex):
array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None)
class PlaneSegmentationVoxelMask(VectorData):
"""
Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["voxel_mask"] = Field("voxel_mask")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ImagingPlane(NWBContainer):
"""
An imaging plane and its metadata.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(
default_factory=dict
)
name: str = Field(...)
@ -331,8 +427,12 @@ class OpticalChannel(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
description: str = Field(
..., description="""Description or other notes about the channel."""
)
emission_lambda: float = Field(
..., description="""Emission wavelength for channel, in nm."""
)
class MotionCorrection(NWBDataInterface):
@ -374,7 +474,9 @@ Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
PlaneSegmentation.model_rebuild()
PlaneSegmentationPixelMaskIndex.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMaskIndex.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()

View file

@ -1,20 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,6 +55,7 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBDataInterface
metamodel_version = "None"
version = "2.2.5"
@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)

View file

@ -1,23 +1,222 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_1_3.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_1_3.hdmf_common_table import (
Data,
Index,
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
Container,
DynamicTable,
)
from .core_nwb_retinotopy import (
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopySignMap,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
PlaneSegmentation,
PlaneSegmentationPixelMaskIndex,
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMaskIndex,
PlaneSegmentationVoxelMask,
ImagingPlane,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
)
from .core_nwb_device import Device
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
)
from .core_nwb_file import (
ScratchData,
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
LabMetaData,
Subject,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.2.5"
@ -36,7 +235,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,8 +54,10 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable
metamodel_version = "None"
version = "2.3.0"
@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -85,7 +114,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -120,7 +151,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -227,7 +260,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.3.0"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.3.0"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
    String,
    Unicode,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.3.0"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -290,7 +331,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
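This new class is where the commit's compound dtype handling shows up for ElectrodeGroup: the (x, y, z) position, previously typed as Any, now gets its own model. A hedged usage sketch with made-up coordinate values:

# The name is fixed by the Literal["position"] default, so only the
# compound members need to be supplied.
pos = ElectrodeGroupPosition(x=1.5, y=-2.0, z=0.25)
assert pos.name == "position"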
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -337,7 +394,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -367,5 +424,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.3.0"
@ -49,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -131,6 +164,37 @@ class TimeIntervalsTagsIndex(VectorIndex):
] = Field(None)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
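As with the other compound dtypes in this commit, each row of the TimeIntervals 'timeseries' column now round-trips through a typed model instead of Optional[List[Any] | Any]. A hedged sketch with made-up values:

ts_ref = TimeIntervalsTimeseries(
    idx_start=100,           # first sample of the epoch in the referenced TimeSeries
    count=250,               # number of samples covered by the epoch
    timeseries="raw_ephys",  # modeled here as a plain string reference
)
assert ts_ref.name == "timeseries"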
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -159,4 +223,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,19 +53,31 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable
from .core_nwb_misc import Units
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import (
NWBContainer,
NWBData,
NWBDataInterface,
ProcessingModule,
TimeSeries,
ProcessingModule,
NWBDataInterface,
)
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
from .core_nwb_icephys import SweepTable, IntracellularElectrode
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
from .core_nwb_ogen import OptogeneticStimulusSite
metamodel_version = "None"
version = "2.3.0"
@ -58,7 +96,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -199,7 +237,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -212,7 +252,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -251,9 +293,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,15 +53,20 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
VectorIndex,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
TimeSeries,
TimeSeriesSync,
)
from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorIndex,
VectorData,
DynamicTable,
)
metamodel_version = "None"
version = "2.3.0"
@ -53,7 +85,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -91,7 +123,9 @@ class PatchClampSeries(TimeSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -140,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded voltage.""")
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
bias_current: Optional[float] = Field(
None, description="""Bias current, in amps."""
)
bridge_balance: Optional[float] = Field(
None, description="""Bridge balance, in ohms."""
)
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
@ -156,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -208,8 +248,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
)
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
bias_current: float = Field(
..., description="""Bias current, in amps, fixed to 0.0."""
)
bridge_balance: float = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
@ -222,7 +266,9 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -268,7 +314,9 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -317,8 +365,12 @@ class VoltageClampSeries(PatchClampSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Recorded current.""")
capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""")
capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""")
capacitance_fast: Optional[str] = Field(
None, description="""Fast capacitance, in farads."""
)
capacitance_slow: Optional[str] = Field(
None, description="""Slow capacitance, in farads."""
)
resistance_comp_bandwidth: Optional[str] = Field(
None, description="""Resistance compensation bandwidth, in hertz."""
)
@ -345,7 +397,9 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -476,7 +530,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp")
name: Literal["whole_cell_series_resistance_comp"] = Field(
"whole_cell_series_resistance_comp"
)
unit: Optional[str] = Field(
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
@ -503,7 +559,9 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -555,7 +613,9 @@ class IntracellularElectrode(NWBContainer):
...,
description="""Description of electrode (e.g., whole-cell, sharp, etc.).""",
)
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
filtering: Optional[str] = Field(
None, description="""Electrode specific filtering."""
)
initial_access_resistance: Optional[str] = Field(
None, description="""Initial access resistance."""
)
@ -563,8 +623,12 @@ class IntracellularElectrode(NWBContainer):
None,
description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
resistance: Optional[str] = Field(
None, description="""Electrode resistance, in ohms."""
)
seal: Optional[str] = Field(
None, description="""Information about seal used for recording."""
)
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)

View file

@ -1,21 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -25,7 +53,8 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import Image, TimeSeries
from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries
metamodel_version = "None"
version = "2.3.0"
@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -74,7 +103,9 @@ class GrayscaleImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -94,7 +125,9 @@ class RGBImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -114,7 +147,9 @@ class RGBAImage(Image):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -148,7 +183,9 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries):
data: Union[
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
] = Field(
..., description="""Images presented to subject, either grayscale or RGB"""
)
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -315,7 +358,9 @@ class IndexSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Index of the frame in the referenced ImageSeries."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",

View file

@ -1,20 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel):
tree_root: bool = False
class Arraylike(ConfiguredBaseModel):
"""
Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Arraylike.model_rebuild()

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,12 +54,16 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
VectorData,
DynamicTable,
)
from .core_nwb_base import TimeSeries
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync
metamodel_version = "None"
version = "2.3.0"
@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries):
...,
description="""Description of the features represented in TimeSeries::data.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries):
data: NDArray[Shape["* num_times"], str] = Field(
..., description="""Annotations made during an experiment."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries):
data: NDArray[Shape["* num_times"], int] = Field(
..., description="""Use values >0 if interval started, <0 if interval ended."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -214,7 +251,9 @@ class DecompositionSeries(TimeSeries):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
data: str = Field(..., description="""Data decomposed into frequency bands.""")
metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""")
metric: str = Field(
..., description="""The metric used, e.g. phase, amplitude, power."""
)
source_channels: Optional[str] = Field(
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
@ -223,7 +262,9 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -261,7 +302,9 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None)
array: Optional[
NDArray[Shape["* num_times, * num_channels, * num_bands"], float]
] = Field(None)
class DecompositionSeriesSourceChannels(DynamicTableRegion):
@ -331,18 +374,22 @@ class Units(DynamicTable):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Units")
spike_times_index: Optional[str] = Field(
None, description="""Index into the spike_times dataset."""
)
spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""")
spike_times: Optional[str] = Field(
None, description="""Spike times for each unit."""
)
obs_intervals_index: Optional[str] = Field(
None, description="""Index into the obs_intervals dataset."""
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field(
None, description="""Observation intervals for each unit."""
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = (
Field(None, description="""Observation intervals for each unit.""")
)
electrodes_index: Optional[str] = Field(
None, description="""Index into electrodes."""
)
electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""")
electrodes: Optional[str] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
@ -362,10 +409,14 @@ class Units(DynamicTable):
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
] = Field(
None, description="""Spike waveform standard deviation for each spike unit."""
)
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = (
Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
)
)
waveforms_index: Optional[str] = Field(
None,

View file
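The 'waveforms' docstring above walks through a doubly ragged example (index_index values [2, 5, 6] over index values [3, 6, 8, 10, 12, 13]). A small hedged sketch of that lookup using plain Python lists, independent of the real VectorIndex machinery:

from typing import List, Tuple

waveforms_index_index = [2, 5, 6]        # per-unit end offsets into waveforms_index
waveforms_index = [3, 6, 8, 10, 12, 13]  # per-spike-event end offsets into waveforms


def waveform_slices_for_unit(unit: int) -> List[Tuple[int, int]]:
    """Return (start, stop) ranges into the waveforms column for one unit."""
    ev_start = waveforms_index_index[unit - 1] if unit > 0 else 0
    ev_stop = waveforms_index_index[unit]
    slices = []
    for ev in range(ev_start, ev_stop):
        wf_start = waveforms_index[ev - 1] if ev > 0 else 0
        slices.append((wf_start, waveforms_index[ev]))
    return slices


print(waveform_slices_for_unit(0))  # [(0, 3), (3, 6)]: two spike events, three waveforms each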

@ -1,25 +1,54 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
@ -27,8 +56,11 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBContainer,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
)
metamodel_version = "None"
version = "2.3.0"
@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries):
data: NDArray[Shape["* num_times"], float] = Field(
..., description="""Applied power for optogenetic stimulus, in watts."""
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(
..., description="""Excitation wavelength, in nm."""
)
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,17 +53,23 @@ if TYPE_CHECKING:
import numpy as np
from .core_nwb_image import ImageSeriesExternalFile, ImageSeries
from ...hdmf_common.v1_5_0.hdmf_common_table import (
DynamicTable,
DynamicTableRegion,
VectorData,
VectorIndex,
DynamicTable,
)
from .core_nwb_base import (
TimeSeriesStartingTime,
NWBContainer,
NWBDataInterface,
TimeSeries,
NWBDataInterface,
TimeSeriesSync,
)
from .core_nwb_image import ImageSeries
metamodel_version = "None"
version = "2.3.0"
@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries):
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict)
children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict)
children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(
default_factory=dict
)
name: str = Field(...)
@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable):
None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
)
pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""")
pixel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
pixel_mask_index: Optional[str] = Field(
None, description="""Index into pixel_mask."""
)
pixel_mask: Optional[str] = Field(
None,
description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""")
voxel_mask: Optional[List[Any] | Any] = Field(
default_factory=list,
voxel_mask_index: Optional[str] = Field(
None, description="""Index into voxel_mask."""
)
voxel_mask: Optional[str] = Field(
None,
description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
)
reference_images: Optional[List[ImageSeries] | ImageSeries] = Field(
@ -307,6 +354,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex):
] = Field(None)
class PlaneSegmentationPixelMask(VectorData):
"""
Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["pixel_mask"] = Field("pixel_mask")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class PlaneSegmentationVoxelMaskIndex(VectorIndex):
"""
Index into voxel_mask.
@ -331,13 +401,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex):
] = Field(None)
class PlaneSegmentationVoxelMask(VectorData):
"""
Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["voxel_mask"] = Field("voxel_mask")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
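PlaneSegmentation gets the same treatment: the concatenated pixel and voxel ROI masks are now typed compound columns rather than Optional[List[Any] | Any]. A hedged sketch with made-up mask values:

pixel = PlaneSegmentationPixelMask(x=12, y=34, weight=0.8)
voxel = PlaneSegmentationVoxelMask(x=12, y=34, z=5, weight=0.8)
assert (pixel.name, voxel.name) == ("pixel_mask", "voxel_mask")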
class ImagingPlane(NWBContainer):
"""
An imaging plane and its metadata.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict)
children: Optional[List[OpticalChannel] | OpticalChannel] = Field(
default_factory=dict
)
name: str = Field(...)
@ -348,8 +444,12 @@ class OpticalChannel(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
description: str = Field(
..., description="""Description or other notes about the channel."""
)
emission_lambda: float = Field(
..., description="""Emission wavelength for channel, in nm."""
)
class MotionCorrection(NWBDataInterface):
@ -391,7 +491,9 @@ Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
PlaneSegmentation.model_rebuild()
PlaneSegmentationPixelMaskIndex.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMaskIndex.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()

View file

@ -1,20 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,6 +55,7 @@ if TYPE_CHECKING:
from .core_nwb_base import NWBDataInterface
metamodel_version = "None"
version = "2.3.0"
@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ImagingRetinotopy")
axis_1_phase_map: str = Field(
..., description="""Phase response to stimulus on the first measured axis."""
)
@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[float] = Field(
None, description="""Focal depth offset, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None)
@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
field_of_view: Optional[float] = Field(
None, description="""Size of viewing area, in meters."""
)
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)

View file

@ -1,23 +1,231 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import (
ExternalResources,
ExternalResourcesKeys,
ExternalResourcesEntities,
ExternalResourcesResources,
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
)
from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix
from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData
from .core_nwb_retinotopy import (
ImagingRetinotopy,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopySignMap,
ImagingRetinotopyVasculatureImage,
)
from .core_nwb_base import (
NWBData,
Image,
NWBContainer,
NWBDataInterface,
TimeSeries,
TimeSeriesData,
TimeSeriesStartingTime,
TimeSeriesSync,
ProcessingModule,
Images,
)
from .core_nwb_ophys import (
TwoPhotonSeries,
RoiResponseSeries,
RoiResponseSeriesRois,
DfOverF,
Fluorescence,
ImageSegmentation,
PlaneSegmentation,
PlaneSegmentationPixelMaskIndex,
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMaskIndex,
PlaneSegmentationVoxelMask,
ImagingPlane,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
)
from .core_nwb_device import Device
from .core_nwb_image import (
GrayscaleImage,
RGBImage,
RGBAImage,
ImageSeries,
ImageSeriesExternalFile,
ImageMaskSeries,
OpticalSeries,
IndexSeries,
)
from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite
from .core_nwb_icephys import (
PatchClampSeries,
PatchClampSeriesData,
CurrentClampSeries,
CurrentClampSeriesData,
IZeroClampSeries,
CurrentClampStimulusSeries,
CurrentClampStimulusSeriesData,
VoltageClampSeries,
VoltageClampSeriesData,
VoltageClampSeriesCapacitanceFast,
VoltageClampSeriesCapacitanceSlow,
VoltageClampSeriesResistanceCompBandwidth,
VoltageClampSeriesResistanceCompCorrection,
VoltageClampSeriesResistanceCompPrediction,
VoltageClampSeriesWholeCellCapacitanceComp,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeries,
VoltageClampStimulusSeriesData,
IntracellularElectrode,
SweepTable,
SweepTableSeriesIndex,
)
from .core_nwb_ecephys import (
ElectricalSeries,
ElectricalSeriesElectrodes,
SpikeEventSeries,
FeatureExtraction,
FeatureExtractionElectrodes,
EventDetection,
EventWaveform,
FilteredEphys,
LFP,
ElectrodeGroup,
ElectrodeGroupPosition,
ClusterWaveforms,
Clustering,
)
from .core_nwb_behavior import (
SpatialSeries,
SpatialSeriesData,
BehavioralEpochs,
BehavioralEvents,
BehavioralTimeSeries,
PupilTracking,
EyeTracking,
CompassDirection,
Position,
)
from .core_nwb_misc import (
AbstractFeatureSeries,
AbstractFeatureSeriesData,
AnnotationSeries,
IntervalSeries,
DecompositionSeries,
DecompositionSeriesData,
DecompositionSeriesSourceChannels,
DecompositionSeriesBands,
Units,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervalsIndex,
UnitsElectrodesIndex,
UnitsElectrodes,
UnitsWaveformsIndex,
UnitsWaveformsIndexIndex,
)
from .core_nwb_file import (
ScratchData,
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
NWBFileGeneralSourceScript,
NWBFileGeneralExtracellularEphys,
NWBFileGeneralExtracellularEphysElectrodes,
NWBFileGeneralIntracellularEphys,
LabMetaData,
Subject,
)
from .core_nwb_epoch import (
TimeIntervals,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTimeseriesIndex,
)
metamodel_version = "None"
version = "2.3.0"
@ -36,7 +244,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,8 +53,10 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData
metamodel_version = "None"
version = "2.4.0"
@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -81,7 +110,18 @@ class TimeSeriesReferenceVectorData(VectorData):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("timeseries")
idx_start: int = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: int = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
)
timeseries: str = Field(
..., description="""The TimeSeries that this index applies to"""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
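Note: TimeSeriesReferenceVectorData is one of the compound-dtype columns this commit targets; each row is an (idx_start, count, timeseries) triple selecting a slice of a referenced TimeSeries, and the regenerated model now also fills in the default name "timeseries". A hypothetical usage sketch, assuming the generated module imports as shown and with made-up values:

from core_nwb_base import TimeSeriesReferenceVectorData  # module path is an assumption

ref = TimeSeriesReferenceVectorData(
    idx_start=0,               # first sample of the referenced TimeSeries
    count=250,                 # number of samples covered by this reference
    timeseries="lick_events",  # the reference is stored as a plain string in these models
)
assert ref.name == "timeseries"  # default supplied by the regenerated model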
@ -105,7 +145,9 @@ class Image(NWBData):
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[str] = Field(
None, description="""Description of the image."""
)
array: Optional[
Union[
NDArray[Shape["* x, * y"], float],
@ -140,7 +182,9 @@ class TimeSeries(NWBDataInterface):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -247,7 +291,7 @@ class Images(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Images")
description: Optional[str] = Field(
None, description="""Description of this collection of images."""
)

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -28,10 +55,14 @@ if TYPE_CHECKING:
from .core_nwb_base import (
NWBDataInterface,
TimeSeriesStartingTime,
TimeSeriesSync,
TimeSeries,
)
from .core_nwb_misc import IntervalSeries
metamodel_version = "None"
version = "2.4.0"
@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries):
None,
description="""Description defining what exactly 'straight-ahead' means.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict)
children: Optional[List[IntervalSeries] | IntervalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -196,7 +235,9 @@ class Position(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict)
children: Optional[List[SpatialSeries] | SpatialSeries] = Field(
default_factory=dict
)
name: str = Field(...)

View file

@ -1,26 +1,61 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
pass
from typing import Literal
else:
pass
from typing_extensions import Literal
if TYPE_CHECKING:
import numpy as np
from .core_nwb_base import NWBContainer
metamodel_version = "None"
version = "2.4.0"
@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -26,13 +53,17 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion
from .core_nwb_base import (
NWBContainer,
TimeSeriesStartingTime,
NWBDataInterface,
TimeSeriesSync,
TimeSeries,
NWBContainer,
)
metamodel_version = "None"
version = "2.4.0"
@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
)
description: Optional[str] = Field(None, description="""Description of the time series.""")
description: Optional[str] = Field(
None, description="""Description of the time series."""
)
comments: Optional[str] = Field(
None,
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("FeatureExtraction")
description: NDArray[Shape["* num_features"], str] = Field(
...,
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = (
Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
)
)
times: NDArray[Shape["* num_events"], float] = Field(
...,
@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("EventDetection")
detection_method: str = Field(
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict)
children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -290,7 +331,9 @@ class LFP(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict)
children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(
default_factory=dict
)
name: str = Field(...)
@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer):
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of this electrode group.""")
description: Optional[str] = Field(
None, description="""Description of this electrode group."""
)
location: Optional[str] = Field(
None,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[Any] = Field(
position: Optional[str] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
class ElectrodeGroupPosition(ConfiguredBaseModel):
"""
stereotaxic or common framework coordinates
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["position"] = Field("position")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
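Note: ElectrodeGroup.position is stored in NWB as a compound dtype with x, y, z components. The regenerated models now emit a dedicated ElectrodeGroupPosition class for it instead of Optional[Any], which is the compound dtype handling improvement named in the commit message. A brief hypothetical sketch (import path and coordinate values are assumptions):

from core_nwb_ecephys import ElectrodeGroupPosition  # module path is an assumption

pos = ElectrodeGroupPosition(x=1.5, y=-2.0, z=0.25)  # illustrative stereotaxic coordinates
assert pos.name == "position"  # fixed by the schema via Literal["position"]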
class ClusterWaveforms(NWBDataInterface):
"""
DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("ClusterWaveforms")
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
@ -337,7 +394,7 @@ class Clustering(NWBDataInterface):
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
name: str = Field(...)
name: str = Field("Clustering")
description: str = Field(
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
@ -367,5 +424,6 @@ EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ElectrodeGroupPosition.model_rebuild()
ClusterWaveforms.model_rebuild()
Clustering.model_rebuild()

View file

@ -1,22 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,10 +54,14 @@ if TYPE_CHECKING:
from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorData,
DynamicTable,
VectorIndex,
)
from .core_nwb_base import TimeSeries
metamodel_version = "None"
version = "2.4.0"
@ -49,7 +80,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -86,10 +117,12 @@ class TimeIntervals(DynamicTable):
description="""User-defined tags that identify or categorize events.""",
)
tags_index: Optional[str] = Field(None, description="""Index for tags.""")
timeseries: Optional[List[Any] | Any] = Field(
default_factory=list, description="""An index into a TimeSeries object."""
timeseries: Optional[str] = Field(
None, description="""An index into a TimeSeries object."""
)
timeseries_index: Optional[str] = Field(
None, description="""Index for timeseries."""
)
timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""")
colnames: Optional[str] = Field(
None,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
@ -131,6 +164,37 @@ class TimeIntervalsTagsIndex(VectorIndex):
] = Field(None)
class TimeIntervalsTimeseries(VectorData):
"""
An index into a TimeSeries object.
"""
linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
name: Literal["timeseries"] = Field("timeseries")
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
timeseries: Optional[str] = Field(
None, description="""the TimeSeries that this index applies to."""
)
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
array: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
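Note: TimeIntervals.timeseries likewise gains its own TimeIntervalsTimeseries column class for the (idx_start, count, timeseries) compound dtype, replacing the earlier Optional[List[Any] | Any] field; unlike TimeSeriesReferenceVectorData, all three components are optional here. A hypothetical sketch of building one row (import path and values are illustrative):

from core_nwb_epoch import TimeIntervalsTimeseries  # module path is an assumption

row = TimeIntervalsTimeseries(
    idx_start=0,                     # start sample in the referenced TimeSeries
    count=100,                       # number of samples in this interval
    timeseries="raw_running_speed",  # reference kept as a string in these models
)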
class TimeIntervalsTimeseriesIndex(VectorIndex):
"""
Index for timeseries.
@ -159,4 +223,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()

View file

@ -1,23 +1,49 @@
from __future__ import annotations
import sys
from datetime import datetime
from datetime import datetime, date
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
List,
Dict,
Optional,
Any,
Union,
ClassVar,
Annotated,
TypeVar,
List,
TYPE_CHECKING,
)
from pydantic import BaseModel as BaseModel, Field
from pydantic import ConfigDict, BeforeValidator
from nptyping import (
Shape,
Float,
Float32,
Double,
Float64,
LongLong,
Int64,
Int,
Int32,
Int16,
Short,
Int8,
UInt,
UInt32,
UInt16,
UInt8,
UInt64,
Number,
String,
Unicode,
Unicode,
Unicode,
String,
Bool,
Datetime64,
)
from pydantic import BaseModel as BaseModel
from pydantic import ConfigDict, Field
from nwb_linkml.types import NDArray
import sys
if sys.version_info >= (3, 8):
from typing import Literal
@ -27,19 +53,39 @@ if TYPE_CHECKING:
import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable
from .core_nwb_misc import Units
from .core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
SimultaneousRecordingsTable,
IntracellularRecordingsTable,
SweepTable,
SequentialRecordingsTable,
IntracellularElectrode,
)
from .core_nwb_device import Device
from .core_nwb_ecephys import ElectrodeGroup
from .core_nwb_epoch import TimeIntervals
from .core_nwb_base import (
NWBContainer,
NWBData,
NWBDataInterface,
ProcessingModule,
TimeSeries,
ProcessingModule,
NWBDataInterface,
)
from .core_nwb_device import Device
from .core_nwb_epoch import TimeIntervals
from .core_nwb_ogen import OptogeneticStimulusSite
from .core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
from .core_nwb_ogen import OptogeneticStimulusSite
metamodel_version = "None"
version = "2.4.0"
@ -58,7 +104,7 @@ class ConfiguredBaseModel(BaseModel):
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
def __getitem__(self, i: slice | int) -> np.ndarray:
def __getitem__(self, i: slice | int) -> "np.ndarray":
if hasattr(self, "array"):
return self.array[i]
else:
@ -199,7 +245,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field(
None, description="""Terms to search over."""
)
lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""")
lab: Optional[str] = Field(
None, description="""Laboratory where experiment was performed."""
)
notes: Optional[str] = Field(None, description="""Notes about the experiment.""")
pharmacology: Optional[str] = Field(
None,
@ -212,7 +260,9 @@ class NWBFileGeneral(ConfiguredBaseModel):
related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field(
None, description="""Publication information. PMID, DOI, URL, etc."""
)
session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""")
session_id: Optional[str] = Field(
None, description="""Lab-specific ID for the session."""
)
slices: Optional[str] = Field(
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
@ -251,9 +301,11 @@ class NWBFileGeneral(ConfiguredBaseModel):
intracellular_ephys: Optional[str] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = (
Field(
default_factory=dict,
description="""Metadata describing optogenetic stimuluation.""",
)
)
optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field(
default_factory=dict, description="""Metadata related to optophysiology."""

Some files were not shown because too many files have changed in this diff.