regenerate models, lint

sneakers-the-rat 2024-07-31 01:56:48 -07:00
parent 1d527d8f71
commit 0d10ba9d75
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
238 changed files with 2056 additions and 13337 deletions
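
Nearly all of the regenerated model code follows one pattern, visible in the hunks below: scalar and array element annotations that previously used numpy dtypes (np.float32, np.float64, np.uint8, np.uint32, np.int32, np.number, np.datetime64) are now emitted with the builtin Python types float, int, and datetime, with the remaining churn coming from black-style re-wrapping. A minimal sketch of the new field style, assuming pydantic is installed; the class name and example values are illustrative and are not taken from the generated files:

    from datetime import datetime
    from typing import Optional

    from pydantic import BaseModel, Field


    class SketchModel(BaseModel):
        # before: rate: Optional[np.float32] = Field(None, ...)
        rate: Optional[float] = Field(None, description="Sampling rate, in Hz.")
        # before: sweep_number: Optional[np.uint32] = Field(None, ...)
        sweep_number: Optional[int] = Field(None)
        # before: session_start_time: np.datetime64 = Field(...)
        session_start_time: datetime = Field(...)


    print(SketchModel(rate=30000.0, session_start_time=datetime(2018, 9, 28, 14, 43, 54)))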

View file

@ -38,7 +38,6 @@ class NWBPydanticGenerator(PydanticGenerator):
Subclass of pydantic generator, custom behavior is in overridden lifecycle methods :)
"""
injected_fields: List[str] = (
(
'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object'
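
For context, each string in injected_fields reads as a complete field definition that the generator injects into the classes it emits. A minimal sketch of what that looks like on one generated class, assuming pydantic is installed (the class name is hypothetical, and the description string, truncated above, is left out rather than guessed):

    from typing import Optional

    from pydantic import BaseModel, Field


    class ExampleGeneratedModel(BaseModel):
        # the injected hdf5_path field, as it would appear on a generated class
        hdf5_path: Optional[str] = Field(None)


    print(ExampleGeneratedModel(hdf5_path="/acquisition/example"))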

View file

@ -33,7 +33,9 @@ def _make_dtypes() -> List[TypeDefinition]:
# repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None
atype = TypeDefinition(
name=nwbtype, minimum_value=amin, typeof=linkmltype, # repr=repr_string
name=nwbtype,
minimum_value=amin,
typeof=linkmltype, # repr=repr_string
)
DTypeTypes.append(atype)
return DTypeTypes
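
A standalone sketch of the TypeDefinition construction above, assuming linkml_runtime is installed; the dtype name, bound, and LinkML base type stand in for the nwbtype, amin, and linkmltype variables and are illustrative only:

    from linkml_runtime.linkml_model import TypeDefinition

    # e.g. an unsigned 8-bit NWB dtype mapped onto a LinkML integer type with a lower bound
    atype = TypeDefinition(name="uint8", typeof="integer", minimum_value=0)
    print(atype.name, atype.typeof, atype.minimum_value)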

View file

@ -83,15 +83,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)

View file

@ -102,7 +102,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -116,11 +116,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={

View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -284,13 +284,12 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
orientation: Optional[str] = Field(
@ -299,11 +298,11 @@ class OpticalSeries(ImageSeries):
)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -325,12 +324,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -358,7 +357,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,12 +371,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -441,7 +441,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -474,14 +474,14 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
colnames: Optional[str] = Field(
@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)

View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height"], np.float32],
]
Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,8 +179,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -280,8 +276,8 @@ class ImagingPlane(NWBContainer):
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@ -321,7 +317,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
)
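
A worked sketch of the conversion factor described in the docstring above, reusing its pixel-to-meter example; all values are illustrative:

# Illustrative only: raw manifold values stored as pixel units in [-500, 499] covering a 2 m range.
conversion = 2.0 / 1000.0        # 2 m spanned by 1000 pixel units -> 0.002 m per unit
raw_value = 250.0                # a stored pixel-unit coordinate
meters = raw_value * conversion  # 0.5 m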
@ -331,8 +327,8 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* height, * width, 3 x_y_z"], np.float32],
NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32],
NDArray[Shape["* height, * width, 3 x_y_z"], float],
NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
]
] = Field(None)
@ -353,7 +349,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel):
unit: Optional[str] = Field(
None, description="""Measurement units for origin_coords. The default value is 'meters'."""
)
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -384,7 +380,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel):
unit: Optional[str] = Field(
None, description="""Measurement units for grid_spacing. The default value is 'meters'."""
)
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -408,9 +404,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):

View file

@ -96,14 +96,12 @@ class RetinotopyMap(NWBData):
)
name: str = Field(...)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -124,19 +122,17 @@ class AxisMap(RetinotopyMap):
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
class RetinotopyImage(GrayscaleImage):
@ -149,29 +145,27 @@ class RetinotopyImage(GrayscaleImage):
)
name: str = Field(...)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -262,32 +256,28 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
}
},
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
bits_per_pixel: Optional[np.int32] = Field(
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)

View file

@ -83,15 +83,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
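
A quick check of the arithmetic in the conversion docstring above (int16 half-range, ±2.5 V span, 8000X gain); the values are taken from that example only:

# Illustrative only: reproducing the docstring's conversion example.
adc_half_range_v = 2.5            # half of the ±2.5 V acquisition range
int16_half_range = 32768          # half-range of a signed 16-bit integer
gain = 8000                       # data acquisition system gain
conversion = adc_half_range_v / int16_half_range / gain   # ~9.5367e-9 V per raw unit
volts = 12000 * conversion        # converting one stored int16 sample to volts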
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
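
The starting_time/rate pair above implies per-sample times for regularly sampled data; a minimal sketch with made-up numbers (the variable names are not part of the model):

# Illustrative only: reconstruct timestamps from starting_time + rate.
starting_time_s = 10.0    # 'value' of starting_time, in seconds
rate_hz = 1000.0          # 'rate' attribute, in Hz
n_samples = 5
timestamps = [starting_time_s + i / rate_hz for i in range(n_samples)]
# -> [10.0, 10.001, 10.002, 10.003, 10.004]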
class TimeSeriesSync(ConfiguredBaseModel):

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
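
source_idx and times relate through the source series' time base; a minimal sketch, assuming the source ElectricalSeries stores explicit timestamps (names and values are illustrative):

# Illustrative only: look up event times from the source series' timestamps via source_idx.
import numpy as np

source_timestamps = np.arange(0.0, 1.0, 0.001)  # hypothetical 1 kHz time base, in seconds
source_idx = np.array([12, 250, 731])           # zero-based indices of detected events
times = source_timestamps[source_idx]           # event times in seconds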
@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
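
idx_start and count together select the slice of the referenced TimeSeries that falls within this epoch; a minimal sketch with placeholder data:

# Illustrative only: slice the referenced TimeSeries by idx_start and count.
data = list(range(100))                      # stand-in for the referenced timeseries.data
idx_start, count = 20, 10
epoch_samples = data[idx_start : idx_start + count]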

View file

@ -102,7 +102,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -116,11 +116,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
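
The datetime fields above are ISO 8601 strings with an explicit offset; the standard-library parser handles them directly (a sketch, not taken from this codebase):

# Illustrative only: parse the docstring's example timestamp.
from datetime import datetime

session_start_time = datetime.fromisoformat("2018-09-28T14:43:54.123+02:00")
# Note: a trailing "Z" is only accepted by fromisoformat on Python 3.11+;
# on older versions replace it with "+00:00" first.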
@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={

View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
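
The starting_frame docstring is a cumulative offset over per-file frame counts; a sketch reproducing its 5/10/20-frame example:

# Illustrative only: cumulative frame offsets for each external file -> [0, 5, 15].
frames_per_file = [5, 10, 20]
starting_frame = [0]
for n in frames_per_file[:-1]:
    starting_frame.append(starting_frame[-1] + n)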
@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -284,13 +284,12 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
orientation: Optional[str] = Field(
@ -299,11 +298,11 @@ class OpticalSeries(ImageSeries):
)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -325,12 +324,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -358,7 +357,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,12 +371,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -441,7 +441,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -474,14 +474,14 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
colnames: Optional[str] = Field(
@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)

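The integer-typed arrays in this file follow the same pattern: IntervalSeries.data drops np.int8 for int and the control arrays drop np.uint8 for int. A sketch under the same numpydantic assumption, where builtin int is a generic dtype intended to match any integer width or signedness; ToyIntervals is an invented stand-in:

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class ToyIntervals(BaseModel):
    data: NDArray[Shape["* num_times"], int] = Field(...)


# int8, uint8, and int32 arrays should all satisfy the generic `int` dtype.
ToyIntervals(data=np.array([1, -1, 1], dtype=np.int8))
ToyIntervals(data=np.array([0, 1, 2], dtype=np.uint8))
ToyIntervals(data=np.array([3, 4, 5], dtype=np.int32))
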
View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

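Scalar attributes change the same way: excitation_lambda above is now a plain float rather than np.float32, so callers can pass ordinary Python numbers without wrapping them in numpy scalar types. A hedged sketch using an invented stand-in class, relying only on stock pydantic coercion:

from pydantic import BaseModel, Field


class ToyStimulusSite(BaseModel):
    excitation_lambda: float = Field(..., description="Excitation wavelength, in nm.")


ToyStimulusSite(excitation_lambda=473.0)  # plain Python float
ToyStimulusSite(excitation_lambda=473)    # pydantic coerces int to float in lax mode
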
View file

@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height"], np.float32],
]
Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,8 +179,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -280,8 +276,8 @@ class ImagingPlane(NWBContainer):
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@ -321,7 +317,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
)
@ -331,8 +327,8 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* height, * width, 3 x_y_z"], np.float32],
NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32],
NDArray[Shape["* height, * width, 3 x_y_z"], float],
NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
]
] = Field(None)
@ -353,7 +349,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel):
unit: Optional[str] = Field(
None, description="""Measurement units for origin_coords. The default value is 'meters'."""
)
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -384,7 +380,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel):
unit: Optional[str] = Field(
None, description="""Measurement units for grid_spacing. The default value is 'meters'."""
)
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -408,9 +404,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):

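field_of_view above keeps its union of fixed-length shapes while switching the dtype to float, so either a 2-vector or a 3-vector should pass validation. A sketch of that shape union, with the same caveat that NDArray and Shape are assumed to come from numpydantic; ToyPlane is invented for illustration:

from typing import Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class ToyPlane(BaseModel):
    field_of_view: Optional[
        Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
    ] = Field(None)


ToyPlane(field_of_view=np.array([1.5, 2.0]))       # width, height
ToyPlane(field_of_view=np.array([1.5, 2.0, 0.3]))  # width, height, depth
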
View file

@ -96,14 +96,12 @@ class RetinotopyMap(NWBData):
)
name: str = Field(...)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -124,19 +122,17 @@ class AxisMap(RetinotopyMap):
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
class RetinotopyImage(GrayscaleImage):
@ -149,29 +145,27 @@ class RetinotopyImage(GrayscaleImage):
)
name: str = Field(...)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -262,32 +256,28 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
}
},
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
bits_per_pixel: Optional[np.int32] = Field(
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)

View file

@ -83,15 +83,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)

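start_time and stop_time above use NDArray[Any, float]: the dtype is constrained while the shape is left open, which is how the generated DynamicTable columns are expressed. A sketch under the same assumptions, passing typing.Any as the shape argument; ToyEpochs is an invented stand-in:

from typing import Any

import numpy as np
from numpydantic import NDArray
from pydantic import BaseModel, Field


class ToyEpochs(BaseModel):
    start_time: NDArray[Any, float] = Field(...)
    stop_time: NDArray[Any, float] = Field(...)


ToyEpochs(
    start_time=np.array([0.0, 1.5, 3.0]),
    stop_time=np.array([1.0, 2.5, 4.0]),
)
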
View file

@ -102,7 +102,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -116,11 +116,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={

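Earlier in this file's diff, np.datetime64 gives way to Python's datetime for file_create_date, session_start_time, timestamps_reference_time, and date_of_birth, so these fields take timezone-aware datetime objects, and pydantic will also parse ISO 8601 strings like the one quoted in the field descriptions. The real NWBFile has many more required fields, so only an invented stand-in is sketched here:

from datetime import datetime, timezone

from pydantic import BaseModel, Field


class ToyFile(BaseModel):
    session_start_time: datetime = Field(...)


ToyFile(session_start_time=datetime(2018, 9, 28, 14, 43, 54, 123000, tzinfo=timezone.utc))
ToyFile(session_start_time="2018-09-28T14:43:54.123+02:00")  # parsed by pydantic
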
View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -356,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -370,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
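
Aside (illustrative only, not part of the diff): the 'starting_frame' description above defines a cumulative-offset rule over per-file frame counts; a short sketch of that arithmetic with a hypothetical helper name.

from itertools import accumulate
from typing import List


def starting_frames(frames_per_file: List[int]) -> List[int]:
    """Zero-based index of the first frame contributed by each external file."""
    # Cumulative sum shifted by one position: the first file always starts at 0.
    return [0] + list(accumulate(frames_per_file[:-1]))


# Matches the worked example in the docstring: files holding 5, 10 and 20 frames.
assert starting_frames([5, 10, 20]) == [0, 5, 15]
assert starting_frames([35]) == [0]  # a single external file holding every frame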

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -441,7 +441,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -474,14 +474,14 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
colnames: Optional[str] = Field(
@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
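
Aside (illustrative only, not part of the diff): the 'band_limits' description above fixes the limits at 2 SD around the center frequency for Gaussian filters; a one-function sketch of that rule (hypothetical helper name).

def gaussian_band_limits(band_mean_hz: float, band_stdev_hz: float):
    # Low and high band limits taken at 2 SD on either side of the center, in Hz.
    return band_mean_hz - 2 * band_stdev_hz, band_mean_hz + 2 * band_stdev_hz


assert gaussian_band_limits(10.0, 1.5) == (7.0, 13.0)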

View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height"], np.float32],
]
Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,8 +179,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}

View file

@ -83,15 +83,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
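
Aside (illustrative only, not part of the diff): the 'conversion' docstring above carries a worked example; reproducing its arithmetic with the values it quotes (the raw sample below is hypothetical, not real acquisition metadata).

# 5 V span over a signed 16-bit range at 8000x gain, as quoted in the docstring.
conversion = 2.5 / 32768 / 8000
assert abs(conversion - 9.5367e-9) < 1e-13

raw_counts = 16384                   # hypothetical stored int16 sample
volts = raw_counts * conversion      # data in volts = stored value * conversion
assert abs(volts - 1.5625e-4) < 1e-12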

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
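
Aside (illustrative only, not part of the diff): the 'channel_conversion' description above gives the conversion chain data in Volts = data * data.conversion * channel_conversion; a small numpy sketch with made-up values, broadcasting the per-channel factors along the channel axis.

import numpy as np

raw = np.array([[100, 200], [300, 400]])       # (num_times, num_channels) raw counts
conversion = 1e-6                               # global factor from the data attribute
channel_conversion = np.array([1.0, 0.5])       # one factor per channel

volts = raw * conversion * channel_conversion   # broadcasts along the channel axis
assert volts.shape == raw.shape
assert np.isclose(volts[0, 1], 200 * 1e-6 * 0.5)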

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
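
Aside (illustrative only, not part of the diff): 'idx_start' and 'count' above describe a window into the referenced TimeSeries, whose first dimension is always time; a minimal slicing sketch with made-up data.

import numpy as np

data = np.arange(10)                       # stand-in for the referenced TimeSeries data
idx_start, count = 3, 4
epoch_samples = data[idx_start : idx_start + count]
assert epoch_samples.tolist() == [3, 4, 5, 6]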

View file

@ -118,7 +118,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -132,11 +132,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -547,7 +547,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
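
Aside (illustrative only, not part of the diffed models): a minimal sketch of the datetime-typed fields above under pydantic's default validation, which parses ISO 8601 strings like the one quoted in the docstrings into timezone-aware datetimes. The class name is hypothetical.

from datetime import datetime
from typing import Optional

from pydantic import BaseModel


class SessionTimeSketch(BaseModel):
    """Hypothetical stand-in for the datetime fields above."""

    session_start_time: datetime
    date_of_birth: Optional[datetime] = None


sketch = SessionTimeSketch(session_start_time="2018-09-28T14:43:54.123+02:00")
assert sketch.session_start_time.utcoffset().total_seconds() == 7200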

View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={


@@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
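
The [0, 5, 15] example in the description above follows from a cumulative sum of per-file frame counts; a minimal sketch (hypothetical counts, not part of the generated models):

    import numpy as np

    frames_per_file = [5, 10, 20]                                       # hypothetical external files
    starting_frame = np.concatenate(([0], np.cumsum(frames_per_file)[:-1]))
    # -> array([ 0,  5, 15]): index of each file's first frame within the full ImageSeries
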
@@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -356,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -370,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},


@@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@@ -441,7 +441,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@@ -474,14 +474,14 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
colnames: Optional[str] = Field(
@@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)


@@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",


@@ -114,24 +114,21 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height"], np.float32],
]
Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -153,12 +150,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,8 +184,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@@ -206,12 +202,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -374,9 +370,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@@ -403,10 +399,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@@ -446,9 +442,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):


@@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}


@@ -83,15 +83,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
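
The conversion description above works the factor out as 2.5/32768/8000 = 9.5367e-9; a minimal sketch applying such a factor to hypothetical raw int16 samples (not part of the generated models):

    import numpy as np

    raw = np.array([-32768, 0, 16384, 32767], dtype=np.int16)  # hypothetical ADC counts
    conversion = 2.5 / 32768 / 8000                            # ~9.5367e-9 volts per count
    volts = raw.astype(np.float64) * conversion                # data in the declared 'unit'
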
@@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
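
When samples are uniformly spaced, only the starting time and rate above are stored and per-sample times are reconstructed; a minimal sketch (hypothetical values, not part of the generated models):

    import numpy as np

    starting_time = 5.0   # seconds, TimeSeriesStartingTime.value
    rate = 1000.0         # Hz, TimeSeriesStartingTime.rate
    timestamps = starting_time + np.arange(4) / rate
    # -> [5.    5.001 5.002 5.003]
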
class TimeSeriesSync(ConfiguredBaseModel):


@@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)


@@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
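Together, idx_start and count select a contiguous slice along the first (time) axis of the referenced TimeSeries. A minimal illustration, with a numpy array standing in for TimeSeries.data:

import numpy as np

data = np.random.rand(1000, 8)   # stand-in for TimeSeries.data: (time, channels)
idx_start, count = 200, 50       # values a TimeIntervalsTimeseries row might carry

epoch_data = data[idx_start:idx_start + count]   # samples belonging to this epoch
assert epoch_data.shape == (50, 8)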

View file

@ -118,7 +118,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -132,11 +132,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
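A quick sketch of producing a session_start_time string in the ISO 8601 extended form described above, using only the standard library; the values are taken from the example in the description:

from datetime import datetime, timezone, timedelta

tz = timezone(timedelta(hours=2))                      # local offset, e.g. UTC+02:00
start = datetime(2018, 9, 28, 14, 43, 54, 123000, tzinfo=tz)
print(start.isoformat(timespec='milliseconds'))        # 2018-09-28T14:43:54.123+02:00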
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -547,7 +547,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(

View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
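Given the [0, 5, 15] example in the description, the file holding any given full-series frame can be found with a right bisect on starting_frame. A small sketch; the file names are hypothetical:

from bisect import bisect_right

external_file = ["a.avi", "b.avi", "c.avi"]   # hypothetical external_file dataset
starting_frame = [0, 5, 15]                   # first full-series frame held by each file

def locate(frame: int):
    """Return (file, frame index within that file) for a full-series frame number."""
    i = bisect_right(starting_frame, frame) - 1
    return external_file[i], frame - starting_frame[i]

print(locate(7))   # ('b.avi', 2): frame 7 is the third frame of the second file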
@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -356,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -370,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
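For Gaussian filter banks, the description says to use 2 SD on either side of the center, so band_limits can be derived from band_mean and band_stdev. A short sketch with made-up numbers:

import numpy as np

band_mean = np.array([8.0, 20.0, 60.0])    # Hz, center of each Gaussian filter
band_stdev = np.array([1.0, 2.5, 5.0])     # Hz

band_limits = np.stack([band_mean - 2 * band_stdev,
                        band_mean + 2 * band_stdev], axis=1)   # shape (num_bands, 2)
print(band_limits)   # rows: [6, 10], [15, 25], [50, 70]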
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -441,7 +441,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -474,14 +474,14 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
colnames: Optional[str] = Field(
@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
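As the description notes, this is usually just the reciprocal of the acquisition sampling rate, e.g.:

sampling_rate_hz = 30_000.0          # acquisition rate the spike times were extracted from
resolution = 1.0 / sampling_rate_hz  # smallest representable spike-time difference, in seconds
print(resolution)                    # 3.3333333333333335e-05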

View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -114,24 +114,23 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -153,12 +152,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -187,8 +186,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -206,12 +204,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -374,9 +372,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
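Each pixel_mask row is an (x, y, weight) triple, so an ROI stored this way can be rasterized into a dense image for inspection. A hedged sketch with plain numpy; the plane size and values are made up:

import numpy as np

pixel_mask = [(10, 12, 0.5), (11, 12, 1.0), (10, 13, 0.75)]   # (x, y, weight) triples
height, width = 32, 32                                        # assumed imaging plane size

dense = np.zeros((height, width), dtype=float)
for x, y, weight in pixel_mask:
    dense[y, x] = weight          # row = y, column = x

print(dense[12, 10], dense[13, 10])   # 0.5 0.75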
@ -403,10 +401,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -446,9 +444,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):

View file

@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
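The maximum (white) pixel value follows directly from bits_per_pixel:

bits_per_pixel = 16
max_white_value = 2 ** bits_per_pixel - 1   # 65535 for a 16-bit image
print(max_white_value)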
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}

View file

@ -88,15 +88,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -148,12 +148,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -182,11 +182,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
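The 'conversion' description above is the one place in this hunk that carries real arithmetic, so a minimal sketch may help. Everything below is illustrative only (made-up values, not part of the generated model): with int16 acquisition over a +/-2.5 V range and 8000x gain, conversion = 2.5/32768/8000.

import numpy as np

raw = np.array([-32768, 0, 16384, 32767], dtype=np.int16)  # hypothetical raw acquisition counts
conversion = 2.5 / 32768 / 8000                            # ~9.5367e-9 V per count, per the docstring
volts = raw.astype(float) * conversion                     # data expressed in the declared 'unit'
print(volts)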
@ -221,11 +221,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
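A small illustration of how 'starting_time' (this class's 'value') together with 'rate' determines uniformly spaced timestamps, as the descriptions above state; the numbers are hypothetical and this is not part of the diff:

import numpy as np

starting_time = 12.0        # seconds; stand-in for TimeSeriesStartingTime.value
rate = 30_000.0             # Hz; stand-in for the 'rate' field
n_samples = 5
timestamps = starting_time + np.arange(n_samples) / rate
print(timestamps)           # [12.0, 12.00003333, 12.00006667, 12.0001, 12.00013333]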
class TimeSeriesSync(ConfiguredBaseModel):

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -432,17 +432,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -170,11 +170,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)
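A sketch of how 'idx_start' and 'count' above select this epoch's slice of the referenced TimeSeries (first dimension is time, per the descriptions); the arrays and values are hypothetical:

import numpy as np

data = np.arange(1000.0)                 # stand-in for the referenced TimeSeries data
timestamps = np.arange(1000.0) / 100.0   # stand-in timestamps at 100 Hz
idx_start, count = 250, 50               # hypothetical values stored in this row
epoch_data = data[idx_start:idx_start + count]
epoch_times = timestamps[idx_start:idx_start + count]
print(epoch_data.shape, epoch_times[0], epoch_times[-1])   # (50,) 2.5 2.99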

View file

@ -118,7 +118,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -132,11 +132,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
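With the switch from np.datetime64 to datetime, these fields hold plain datetime objects; the descriptions ask for ISO 8601 with a local timezone offset, so an illustrative (made-up) value looks like this:

from datetime import datetime, timezone, timedelta

session_start_time = datetime(
    2018, 9, 28, 14, 43, 54, 123000, tzinfo=timezone(timedelta(hours=2))
)
print(session_start_time.isoformat())   # 2018-09-28T14:43:54.123000+02:00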
@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
filtering: NDArray[Any, np.float32] = Field(
filtering: NDArray[Any, float] = Field(
...,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -544,7 +544,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(

View file

@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -259,18 +259,16 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -283,12 +281,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -320,10 +318,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -336,12 +334,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -412,10 +410,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -428,12 +426,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -489,7 +487,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -512,7 +510,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -535,7 +533,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -558,7 +556,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -581,7 +579,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -604,7 +602,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -627,7 +625,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -644,10 +642,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -660,12 +658,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -739,7 +737,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
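The starting_frame description above includes a worked example (files with 5, 10, and 20 frames give [0, 5, 15]); an illustrative snippet reproducing it, with hypothetical frame counts:

import numpy as np

frames_per_file = [5, 10, 20]                          # hypothetical external files
starting_frame = np.cumsum([0] + frames_per_file)[:-1]
print(starting_frame.tolist())                         # [0, 5, 15]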
@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -356,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -370,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
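Per the band_limits description above (2 SD on either side of the Gaussian center), a small illustrative sketch with made-up band centers and widths:

import numpy as np

band_mean = np.array([8.0, 20.0, 60.0])     # Hz, hypothetical filter centers
band_stdev = np.array([1.0, 2.5, 5.0])      # Hz, hypothetical standard deviations
band_limits = np.stack(
    [band_mean - 2 * band_stdev, band_mean + 2 * band_stdev], axis=1
)                                           # shape (num_bands, 2 low_high)
print(band_limits)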
@ -445,7 +445,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -478,17 +478,17 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
json_schema_extra={
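The waveforms description above defines a doubly indexed (ragged) column; below is a sketch of resolving it, assuming the stored values are cumulative end indices as the docstring's own example ([2, 5, 6] and [3, 6, 8, 10, 12, 13]) implies. The arrays and helper are hypothetical, not part of the model:

import numpy as np

waveforms = np.zeros((13, 30))            # 13 waveform rows x 30 samples, hypothetical
waveforms_index = [3, 6, 8, 10, 12, 13]   # end index into 'waveforms' per spike event
waveforms_index_index = [2, 5, 6]         # end index into 'waveforms_index' per unit

def spike_waveforms(unit, spike):
    """Waveform rows (one per electrode) for one spike event of one unit."""
    ev_start = waveforms_index_index[unit - 1] if unit else 0
    event = ev_start + spike                              # flat position in waveforms_index
    row_start = waveforms_index[event - 1] if event else 0
    return waveforms[row_start:waveforms_index[event]]

print(spike_waveforms(0, 0).shape)        # (3, 30): first spike of unit 0 spans 3 electrodes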
@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)

View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -114,24 +114,23 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Optional[
Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -153,12 +152,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -187,8 +186,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -206,12 +204,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -371,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -400,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -443,9 +441,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):

View file

@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}

View file

@ -90,11 +90,11 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
idx_start: np.int32 = Field(
idx_start: int = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: np.int32 = Field(
count: int = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
)
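Each row of TimeSeriesReferenceVectorData points at a window of a referenced TimeSeries through 'idx_start' and 'count', with time always on the first axis of the data. A minimal NumPy sketch of resolving such a reference; the array shape and the row values are invented for illustration.

import numpy as np

data = np.zeros((100, 3))        # hypothetical TimeSeries data: 100 samples x 3 channels
idx_start, count = 20, 15        # one reference row

window = data[idx_start:idx_start + count]   # the referenced span, time on axis 0
assert window.shape == (15, 3)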
@ -122,15 +122,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -182,12 +182,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -216,11 +216,11 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
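The 'conversion' docstring above works its int16 example all the way to a number (2.5/32768/8000). A small sketch reproducing that arithmetic and applying the factor to raw samples; the raw values are invented for illustration.

import numpy as np

conversion = 2.5 / 32768 / 8000                  # volts per ADC count for the docstring's setup
raw = np.array([-32768, 0, 16384, 32767], dtype=np.int16)

volts = raw.astype(np.float64) * conversion
print(conversion)                                # 9.5367431640625e-09, i.e. ~9.5367e-9
print(volts)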
@ -255,11 +255,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
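With 'starting_time' and its 'rate' attribute in place of explicit timestamps, the time of every sample follows from the first timestamp and the sampling rate. A minimal sketch of that reconstruction; the numbers are illustrative.

import numpy as np

starting_time = 12.5    # seconds (the 'value' of starting_time)
rate = 30.0             # Hz (its 'rate' attribute)

timestamps = starting_time + np.arange(5) / rate   # timestamp of sample i = start + i / rate
print(timestamps)       # [12.5        12.53333333 12.56666667 12.6        12.63333333]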
class TimeSeriesSync(ConfiguredBaseModel):

View file

@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)

View file

@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
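The 'channel_conversion' docstring spells out the combined scaling, data in volts = data * data.conversion * channel_conversion, with the per-channel factor broadcast along the channel axis. A NumPy sketch of that broadcast under assumed shapes and factors; none of the numbers come from a real recording.

import numpy as np

raw = np.ones((50, 4), dtype=np.int16)                # hypothetical data: time x channels
conversion = 1e-6                                     # global factor from data.conversion
channel_conversion = np.array([1.0, 1.0, 0.5, 2.0])   # one factor per channel

volts = raw * conversion * channel_conversion         # broadcast along the channel axis
assert volts.shape == (50, 4)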
@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -432,17 +432,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@ -170,11 +170,11 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
idx_start: Optional[np.int32] = Field(
idx_start: Optional[int] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: Optional[np.int32] = Field(
count: Optional[int] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
)

View file

@ -126,7 +126,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -140,11 +140,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
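'session_start_time' and 'timestamps_reference_time' are ISO 8601 extended strings with an explicit offset (or a trailing "Z" for UTC), and every other timestamp in the file is seconds relative to the reference time. A standard-library sketch using the docstrings' example instant; the 3.5 s offset is arbitrary.

from datetime import datetime, timezone, timedelta

session_start = datetime.fromisoformat("2018-09-28T14:43:54.123+02:00")
reference = session_start.astimezone(timezone.utc)

print(reference.isoformat())                 # 2018-09-28T12:43:54.123000+00:00
print(reference + timedelta(seconds=3.5))    # where a timestamp of 3.5 s lands in wall-clock time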
@ -380,7 +380,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: NDArray[Any, np.float32] = Field(
x: NDArray[Any, float] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -389,7 +389,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: NDArray[Any, np.float32] = Field(
y: NDArray[Any, float] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -398,7 +398,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: NDArray[Any, np.float32] = Field(
z: NDArray[Any, float] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -407,7 +407,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: NDArray[Any, np.float32] = Field(
imp: NDArray[Any, float] = Field(
...,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@ -446,7 +446,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -455,7 +455,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -464,7 +464,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -573,7 +573,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(

View file

@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -746,7 +744,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -141,13 +141,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -169,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -205,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
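The 'starting_frame' docstring above includes a worked example: three external files of 5, 10, and 20 frames give starting frames [0, 5, 15]. A sketch mapping a frame index of the full ImageSeries back to a file and a frame within it; the paths and helper are hypothetical.

import bisect

external_file = ["a.mp4", "b.mp4", "c.mp4"]   # hypothetical paths
starting_frame = [0, 5, 15]                   # from the docstring's example

def locate(frame):
    """Return (path, frame index within that file) for a frame of the full series."""
    i = bisect.bisect_right(starting_frame, frame) - 1
    return external_file[i], frame - starting_frame[i]

assert locate(0) == ("a.mp4", 0)
assert locate(7) == ("b.mp4", 2)     # frames 5-14 live in the second file
assert locate(15) == ("c.mp4", 0)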
@ -225,13 +224,12 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -253,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -286,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -325,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -358,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},


@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -445,7 +445,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -478,17 +478,17 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
json_schema_extra={
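# Editorial sketch, not part of the generated model: resolving the doubly indexed
# 'waveforms' column using the hypothetical values from the docstring above.
# waveforms_index_index = [2, 5, 6]  -> unit 0 owns spike events 0-1, unit 1 owns 2-4, unit 2 owns 5
# waveforms_index = [3, 6, 8, 10, 12, 13]  -> spike event k owns waveform rows start:stop, where
#     stop = waveforms_index[k] and start = waveforms_index[k - 1] if k > 0 else 0
# so, e.g., the first spike event of the first unit corresponds to waveforms rows 0-2,
# one row per electrode.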
@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)


@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",


@ -114,25 +114,23 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -154,12 +152,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -188,8 +186,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -207,12 +204,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -401,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -444,9 +441,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")


class MotionCorrection(NWBDataInterface):


@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
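# Editorial note, an assumption not stated in the schema text: for an unsigned
# integer image, the maximum (white) pixel value implied by bits_per_pixel is
# presumably (2 ** bits_per_pixel) - 1, e.g. 65535 when bits_per_pixel = 16.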
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}


@ -114,11 +114,11 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
idx_start: np.int32 = Field(
idx_start: int = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: np.int32 = Field(
count: int = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
)
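# Editorial sketch, not part of the generated model: per the descriptions above,
# a reference selects a window of a hypothetical referenced TimeSeries `ts` along
# its first (time) dimension, roughly:
#     window_data = ts.data[idx_start : idx_start + count]
#     window_times = ts.timestamps[idx_start : idx_start + count]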
@ -146,15 +146,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -221,12 +221,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,15 +255,15 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
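# Editorial check of the worked example in the 'conversion' docstring above:
# int16 counts spanning -32,768..32,767 mapped onto a +/-2.5 V range through an
# 8000x gain give 2.5 / 32768 / 8000 = 9.5367e-9 (approx.) volts per count, so
# volts = raw_value * conversion.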
offset: Optional[np.float32] = Field(
offset: Optional[float] = Field(
None,
description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
@ -298,11 +298,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
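# Editorial sketch, an assumption based on the starting_time/rate descriptions
# above: for regularly sampled data, sample i presumably falls at
#     t_i = starting_time.value + i / rate  # seconds, rate in Hz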


class TimeSeriesSync(ConfiguredBaseModel):


@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,10 +133,10 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, 1 x"], np.number],
NDArray[Shape["* num_times, 2 x_y"], np.number],
NDArray[Shape["* num_times, 3 x_y_z"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, 1 x"], float],
NDArray[Shape["* num_times, 2 x_y"], float],
NDArray[Shape["* num_times, 3 x_y_z"], float],
]
] = Field(None)


@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
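# Editorial sketch, not part of the generated model: restating the formula from
# the docstring above for a raw (num_times, num_channels) array, with
# channel_conversion broadcast along the channel axis:
#     volts = raw * conversion * channel_conversion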
@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")


class ClusterWaveforms(NWBDataInterface):
@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -432,17 +432,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},


@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={


@ -127,7 +127,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -141,11 +141,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
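# Editorial sketch, not part of the generated model: the ISO 8601 strings shown
# in the docstrings above parse directly with the standard library, which is
# presumably why these slots are now plain datetime rather than np.datetime64:
#     from datetime import datetime
#     datetime.fromisoformat("2018-09-28T14:43:54.123+02:00")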
@ -383,7 +383,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: Optional[NDArray[Any, np.float32]] = Field(
x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -392,7 +392,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: Optional[NDArray[Any, np.float32]] = Field(
y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -401,7 +401,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: Optional[NDArray[Any, np.float32]] = Field(
z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -410,7 +410,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: Optional[NDArray[Any, np.float32]] = Field(
imp: Optional[NDArray[Any, float]] = Field(
None,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@ -449,7 +449,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -458,7 +458,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -467,7 +467,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -576,7 +576,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(


@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -747,7 +745,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
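
The sweep_number column exists so that PatchClampSeries acquired together can be grouped. A tiny sketch of that grouping over stand-in records (real code would iterate over loaded series objects; the names are made up):

from collections import defaultdict

series_list = [
    {"name": "current_clamp_0", "sweep_number": 1},
    {"name": "voltage_clamp_0", "sweep_number": 1},
    {"name": "current_clamp_1", "sweep_number": 2},
]

by_sweep = defaultdict(list)
for series in series_list:
    by_sweep[series["sweep_number"]].append(series["name"])

print(by_sweep[1])  # ['current_clamp_0', 'voltage_clamp_0'], i.e. the series from the same sweep
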


@ -71,15 +71,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -94,15 +94,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -117,15 +117,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -141,13 +141,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -169,12 +168,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -205,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
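
The worked example in the description ([0, 5, 15] for external files holding 5, 10 and 20 frames) amounts to a cumulative sum with a leading zero. A small illustrative sketch, not part of the generated model:

import numpy as np

frames_per_file = [5, 10, 20]  # frame count of each external file, in order
starting_frame = np.concatenate(([0], np.cumsum(frames_per_file)[:-1]))
print(starting_frame.tolist())  # [0, 5, 15], matching the example in the description
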
@ -225,13 +224,12 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -253,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -286,24 +284,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -325,12 +322,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -358,7 +355,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.uint32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the image (using zero-indexing) in the linked Images object.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,12 +369,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},


@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
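
Under this convention an interval contributes one positive entry at its start and one negative entry at its end. A minimal sketch building data and timestamps from (start, stop) pairs, using the simplest +1/-1 labels (the values are made up):

intervals = [(1.0, 2.5), (4.0, 4.2)]  # (start, stop) times in seconds

data, timestamps = [], []
for start, stop in intervals:
    data += [1, -1]              # >0 marks the interval start, <0 marks its end
    timestamps += [start, stop]

# data       -> [1, -1, 1, -1]
# timestamps -> [1.0, 2.5, 4.0, 4.2]
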
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
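
For Gaussian filters the band_limits description suggests 2 SD on either side of the center, which ties the three band columns together. A short sketch under that convention, with made-up numbers:

import numpy as np

band_mean = np.array([8.0, 20.0, 60.0])   # filter centers, in Hz
band_stdev = np.array([1.5, 3.0, 10.0])   # filter standard deviations, in Hz

# (num_bands, 2) array of low/high limits: center minus/plus 2 SD
band_limits = np.stack([band_mean - 2 * band_stdev, band_mean + 2 * band_stdev], axis=1)
# first band -> [5.0, 11.0]
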
@ -445,7 +445,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -478,17 +478,17 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
json_schema_extra={
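
The doubly ragged indexing described above can be resolved with the two offset arrays from the docstring's own example (waveforms_index_index = [2, 5, 6], waveforms_index = [3, 6, 8, 10, 12, 13]). A sketch of looking up the waveform rows for one unit; the helper is illustrative only:

waveforms_index_index = [2, 5, 6]        # per-unit end offsets into waveforms_index
waveforms_index = [3, 6, 8, 10, 12, 13]  # per-spike-event end offsets into waveforms

def waveform_slices(unit: int) -> list:
    # row ranges in 'waveforms' for each spike event of the given unit
    ev_start = waveforms_index_index[unit - 1] if unit > 0 else 0
    ev_stop = waveforms_index_index[unit]
    slices = []
    for ev in range(ev_start, ev_stop):
        row_start = waveforms_index[ev - 1] if ev > 0 else 0
        slices.append(slice(row_start, waveforms_index[ev]))
    return slices

print(waveform_slices(0))  # [slice(0, 3), slice(3, 6)]: two spike events, three electrodes each
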
@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)


@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",


@ -114,25 +114,23 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -154,12 +152,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -188,8 +186,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -207,12 +204,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -372,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
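
Each pixel_mask element pairs image coordinates with a weight. A quick sketch rasterizing a handful of (x, y, weight) triplets into a dense 2D mask; the plane shape and values are made up:

import numpy as np

pixel_mask = [(10, 4, 0.5), (11, 4, 1.0), (10, 5, 0.25)]  # (x, y, weight) per ROI pixel

mask_img = np.zeros((32, 32), dtype=float)  # assumed height x width of the imaging plane
for x, y, weight in pixel_mask:
    mask_img[y, x] = weight  # rows index y, columns index x
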
@ -401,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -444,9 +441,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):


@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(
None, description="""Unit that axis data is stored in (e.g., degrees)."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
focal_depth: Optional[np.float32] = Field(
None, description="""Focal depth offset, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
bits_per_pixel: Optional[np.int32] = Field(
bits_per_pixel: Optional[int] = Field(
None,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
dimension: Optional[np.int32] = Field(
dimension: Optional[int] = Field(
None,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
field_of_view: Optional[np.float32] = Field(
None, description="""Size of viewing area, in meters."""
)
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(
None, description="""Format of image. Right now only 'raw' is supported."""
)
array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}


@ -114,11 +114,11 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
idx_start: np.int32 = Field(
idx_start: int = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
)
count: np.int32 = Field(
count: int = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
)
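
A row of this column addresses a window of the referenced TimeSeries along its first (time) axis. A minimal sketch of resolving one reference against stand-in data and timestamps arrays; the names and numbers are illustrative:

import numpy as np

# stand-ins for the referenced TimeSeries' datasets; the first dimension is time
data = np.arange(100.0)
timestamps = np.arange(100.0) / 30.0

idx_start, count = 10, 25  # one (idx_start, count) reference
window_data = data[idx_start:idx_start + count]
window_times = timestamps[idx_start:idx_start + count]
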
@ -146,15 +146,15 @@ class Image(NWBData):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -221,12 +221,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -255,15 +255,15 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
conversion: Optional[np.float32] = Field(
conversion: Optional[float] = Field(
None,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
)
offset: Optional[np.float32] = Field(
offset: Optional[float] = Field(
None,
description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
)
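
The conversion and offset descriptions above define the full scaling chain. A short sketch applying it to the int16 example spelled out in the conversion docstring (the gain and voltage range are taken from that text, everything else is illustrative):

import numpy as np

raw = np.array([-32768, 0, 32767], dtype=np.int16)  # values as written by the acquisition system

conversion = 2.5 / 32768 / 8000  # ~9.5367e-9 V per raw unit, per the docstring's example
offset = 0.0                     # no extra shift in this example

volts = raw.astype(float) * conversion + offset  # data expressed in the declared 'unit'
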
@ -298,11 +298,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
unit: Optional[str] = Field(
None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
)
value: np.float64 = Field(...)
value: float = Field(...)
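
When only starting_time and rate are stored, the per-sample times follow from the rule in the docstrings ("all subsequent ones calculated from the sampling rate"). A one-line sketch with illustrative numbers:

import numpy as np

starting_time, rate, num_samples = 12.0, 30000.0, 5  # seconds, Hz, number of samples
timestamps = starting_time + np.arange(num_samples) / rate
# -> [12.0, 12.0000333, 12.0000667, 12.0001, 12.0001333]
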
class TimeSeriesSync(ConfiguredBaseModel):


@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -133,10 +133,10 @@ class SpatialSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, 1 x"], np.number],
NDArray[Shape["* num_times, 2 x_y"], np.number],
NDArray[Shape["* num_times, 3 x_y_z"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, 1 x"], float],
NDArray[Shape["* num_times, 2 x_y"], float],
NDArray[Shape["* num_times, 3 x_y_z"], float],
]
] = Field(None)


@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_channels"], np.number],
NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_channels"], float],
NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
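
The docstring gives the composition rule directly (data in Volts = data * data.conversion * channel_conversion). A compact sketch with made-up shapes and factors:

import numpy as np

raw = np.random.randint(-2048, 2048, size=(1000, 4))  # (num_times, num_channels) raw counts
conversion = 1.95e-7                                  # global factor from data.conversion
channel_conversion = np.array([1.0, 1.0, 0.5, 2.0])   # one factor per channel

volts = raw * conversion * channel_conversion         # broadcasts along the channel axis
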
@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_events, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
NDArray[Shape["* num_events, * num_samples"], float],
NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
x: Optional[np.float32] = Field(None, description="""x coordinate""")
y: Optional[np.float32] = Field(None, description="""y coordinate""")
z: Optional[np.float32] = Field(None, description="""z coordinate""")
x: Optional[float] = Field(None, description="""x coordinate""")
y: Optional[float] = Field(None, description="""y coordinate""")
z: Optional[float] = Field(None, description="""z coordinate""")
class ClusterWaveforms(NWBDataInterface):
@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@ -432,17 +432,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
num: NDArray[Shape["* num_events"], np.int32] = Field(
num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
times: NDArray[Shape["* num_events"], np.float64] = Field(
times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},

View file

@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
start_time: NDArray[Any, np.float32] = Field(
start_time: NDArray[Any, float] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
}
},
)
stop_time: NDArray[Any, np.float32] = Field(
stop_time: NDArray[Any, float] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={

View file

@ -127,7 +127,7 @@ class NWBFile(NWBContainer):
None,
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
)
file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@ -141,11 +141,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
session_start_time: np.datetime64 = Field(
session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
timestamps_reference_time: np.datetime64 = Field(
timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@ -383,7 +383,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
x: Optional[NDArray[Any, np.float32]] = Field(
x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@ -392,7 +392,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
y: Optional[NDArray[Any, np.float32]] = Field(
y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@ -401,7 +401,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
z: Optional[NDArray[Any, np.float32]] = Field(
z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@ -410,7 +410,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
imp: Optional[NDArray[Any, np.float32]] = Field(
imp: Optional[NDArray[Any, float]] = Field(
None,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@ -449,7 +449,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_x: Optional[NDArray[Any, np.float32]] = Field(
rel_x: Optional[NDArray[Any, float]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@ -458,7 +458,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_y: Optional[NDArray[Any, np.float32]] = Field(
rel_y: Optional[NDArray[Any, float]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@ -467,7 +467,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
rel_z: Optional[NDArray[Any, np.float32]] = Field(
rel_z: Optional[NDArray[Any, float]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@ -576,7 +576,7 @@ class Subject(NWBContainer):
age: Optional[SubjectAge] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
date_of_birth: Optional[np.datetime64] = Field(
date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(

View file

@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
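The 'unit' description above says stored values are brought into the base unit by multiplying by 'conversion' and adding 'offset'. A minimal sketch with illustrative values:

    import numpy as np

    raw = np.array([10, 20, 30], dtype=np.int16)  # stored values; illustrative
    conversion = 1e-3                             # 'conversion' attribute
    offset = 0.05                                 # 'offset' attribute

    # Values expressed in the base unit named by the 'unit' attribute
    values = raw * conversion + offset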
@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[np.float32] = Field(
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
)
bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: np.float32 = Field(
..., description="""Bridge balance, in ohms, fixed to 0.0."""
)
capacitance_compensation: np.float32 = Field(
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
None,
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
None,
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
None,
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
)
value: np.float32 = Field(...)
value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
stimulus_description: Optional[str] = Field(
None, description="""Protocol/stimulus name for this patch-clamp dataset."""
)
sweep_number: Optional[np.uint32] = Field(
sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
gain: Optional[np.float32] = Field(
gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -747,7 +745,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
sweep_number: NDArray[Any, np.uint32] = Field(
sweep_number: NDArray[Any, int] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={

View file

@ -76,15 +76,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -99,15 +99,15 @@ class RGBImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -122,15 +122,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
resolution: Optional[np.float32] = Field(
resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
array: Optional[
Union[
NDArray[Shape["* x, * y"], np.number],
NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@ -146,13 +146,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -174,12 +173,12 @@ class ImageSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -210,7 +209,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
starting_frame: Optional[np.int32] = Field(
starting_frame: Optional[int] = Field(
None,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
@ -230,13 +229,12 @@ class ImageMaskSeries(ImageSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -258,12 +256,12 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -291,24 +289,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
distance: Optional[np.float32] = Field(
distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
NDArray[Shape["* frame, * x, * y"], float],
NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -330,12 +327,12 @@ class OpticalSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -363,7 +360,7 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.uint32] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the image (using zero-indexing) in the linked Images object.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -377,12 +374,12 @@ class IndexSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},

View file

@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
)
array: Optional[
Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_features"], np.number],
NDArray[Shape["* num_times"], float],
NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.int8] = Field(
data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
None,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
band_mean: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
band_stdev: NDArray[Shape["* num_bands"], float] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@ -445,7 +445,7 @@ class Units(DynamicTable):
"linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
},
)
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
None,
description="""Observation intervals for each unit.""",
json_schema_extra={
@ -478,17 +478,17 @@ class Units(DynamicTable):
)
waveform_mean: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: Optional[
Union[
NDArray[Shape["* num_units, * num_samples"], np.float32],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field(
None,
description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
json_schema_extra={
@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
resolution: Optional[np.float64] = Field(
resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
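The 'waveforms' description above walks through a doubly indexed (doubly ragged) column using waveforms_index_index = [2, 5, 6] and waveforms_index = [3, 6, 8, 10, 12, 13]. A minimal sketch of how those indices resolve to rows of the 'waveforms' array, using the same numbers and an illustrative waveform length:

    import numpy as np

    waveforms_index_index = [2, 5, 6]        # per-unit end offsets into waveforms_index
    waveforms_index = [3, 6, 8, 10, 12, 13]  # per-spike-event end offsets into waveforms
    waveforms = np.zeros((13, 40))           # (num_waveforms, num_samples); 40 samples is illustrative

    def ragged(index, i):
        """Return (start, end) bounds of element i in a ragged column."""
        start = index[i - 1] if i > 0 else 0
        return start, index[i]

    # Waveforms recorded for the first spike event of the first unit:
    ev_start, ev_end = ragged(waveforms_index_index, 0)  # unit 0 owns spike events 0..1
    w_start, w_end = ragged(waveforms_index, ev_start)   # that unit's first event -> rows 0..2
    unit0_first_event = waveforms[w_start:w_end]         # 3 waveforms, one per electrode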

View file

@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
data: NDArray[Shape["* num_times"], np.number] = Field(
data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",

View file

@ -114,31 +114,28 @@ class OnePhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
exposure_time: Optional[np.float32] = Field(
exposure_time: Optional[float] = Field(
None, description="""Exposure time of the sample; often the inverse of the frequency."""
)
binning: Optional[np.uint8] = Field(
binning: Optional[int] = Field(
None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
)
power: Optional[np.float32] = Field(
None, description="""Power of the excitation in mW, if known."""
)
intensity: Optional[np.float32] = Field(
power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
intensity: Optional[float] = Field(
None, description="""Intensity of the excitation in mW/mm^2, if known."""
)
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -160,12 +157,12 @@ class OnePhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -193,25 +190,23 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[np.float32] = Field(
pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
NDArray[Shape["2 width_height"], np.float32],
NDArray[Shape["3 width_height_depth"], np.float32],
NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
NDArray[Shape["* frame, * x, * y"], np.number],
NDArray[Shape["* frame, * x, * y, * z"], np.number],
NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@ -233,12 +228,12 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -267,8 +262,7 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
NDArray[Shape["* num_times"], np.number],
NDArray[Shape["* num_times, * num_rois"], np.number],
NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
@ -286,12 +280,12 @@ class RoiResponseSeries(TimeSeries):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@ -451,9 +445,9 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -480,10 +474,10 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
description: Optional[str] = Field(
None, description="""Description of what these vectors represent."""
)
@ -523,9 +517,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
emission_lambda: np.float32 = Field(
..., description="""Emission wavelength for channel, in nm."""
)
emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):

Some files were not shown because too many files have changed in this diff.