initial structure for sqlmodel

sneakers-the-rat 2023-09-04 13:49:07 -07:00
parent 9dd7304334
commit 07110a0dd1
79 changed files with 976 additions and 505 deletions

View file

@@ -51,7 +51,10 @@ class NamespacesAdapter(Adapter):
id = ns.name,
description = ns.doc,
version = ns.version,
imports=[sch.name for sch in ns_schemas]
imports=[sch.name for sch in ns_schemas],
default_prefix=ns.name,
prefixes={ns.name: f'https://example.com/{ns.name}/'},
)
sch_result.schemas.append(ns_schema)
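Both this hunk and the `SchemaAdapter` hunks below attach a `default_prefix` and a `prefixes` map to every generated `SchemaDefinition`, using a placeholder `https://example.com/...` base URI. A minimal sketch of that construction in isolation, assuming a hypothetical namespace name (not a value from this repo):

```python
# Minimal sketch; the namespace name is a placeholder mirroring the pattern above.
from linkml_runtime.linkml_model import SchemaDefinition

ns_name = "core"  # hypothetical namespace name
schema = SchemaDefinition(
    name=ns_name,
    id=ns_name,
    default_prefix=ns_name,
    # CURIE prefix -> URI expansion; plain strings are coerced to Prefix objects
    prefixes={ns_name: f"https://example.com/{ns_name}/"},
)
print(schema.default_prefix, schema.prefixes)
```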

View file

@@ -87,6 +87,8 @@ class SchemaAdapter(Adapter):
sch = SchemaDefinition(
name = self.name,
id = self.name,
default_prefix=self.name,
prefixes={self.name:f'https://example.com/{self.name}/'},
imports = [i.name for i in self.imports],
classes=res.classes,
slots=res.slots,
@@ -128,7 +130,10 @@ class SchemaAdapter(Adapter):
imports=main_imports,
classes=main_classes,
slots=classes.slots,
types=classes.types
types=classes.types,
default_prefix=self.name,
prefixes={self.name: f'https://example.com/{self.name}/'},
)
split_sch = SchemaDefinition(
@@ -137,7 +142,10 @@ class SchemaAdapter(Adapter):
imports=imports,
classes=split_classes,
slots=classes.slots,
types=classes.types
types=classes.types,
default_prefix=self.name,
prefixes={self.name: f'https://example.com/{self.name}/'},
)
if len(split_classes) > 0:
res = BuildResult(

View file

@@ -17,7 +17,7 @@ The `serialize` method
"""
import pdb
from typing import List, Dict, Set
from typing import List, Dict, Set, Optional
from copy import deepcopy
import warnings
@@ -384,7 +384,7 @@ class NWBPydanticGenerator(PydanticGenerator):
parents[camelcase(class_def.name)] = class_parents
return parents
def serialize(self) -> str:
def serialize(self, schemaview:Optional[SchemaView]=None) -> str:
if self.template_file is not None:
with open(self.template_file) as template_file:
template_obj = Template(template_file.read())
@@ -392,7 +392,10 @@ class NWBPydanticGenerator(PydanticGenerator):
template_obj = Template(default_template(self.pydantic_version))
sv: SchemaView
sv = self.schemaview
if schemaview:
sv = schemaview
else:
sv = self.schemaview
schema = sv.schema
pyschema = SchemaDefinition(
id=schema.id,
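The `serialize` override above now takes an optional `SchemaView`, so a caller can render against a transformed schema instead of the generator's own. A hedged usage sketch (file paths are placeholders, not from this commit):

```python
# Sketch only; the schema paths are hypothetical.
from linkml_runtime.utils.schemaview import SchemaView
from nwb_linkml.generators.pydantic import NWBPydanticGenerator

gen = NWBPydanticGenerator("path/to/namespace.yaml")     # hypothetical schema path
alt_sv = SchemaView("path/to/transformed_schema.yaml")   # hypothetical schema path
code = gen.serialize(schemaview=alt_sv)  # omit schemaview to fall back to gen.schemaview
```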

View file

@@ -0,0 +1,207 @@
import pdb
from collections import defaultdict
from linkml.generators.sqltablegen import SQLTableGenerator
from linkml.transformers.relmodel_transformer import ForeignKeyPolicy, RelationalModelTransformer
from linkml.utils.generator import Generator, shared_arguments
from linkml_runtime.utils.schemaview import SchemaView
from linkml_runtime.linkml_model import (
Annotation,
ClassDefinition,
ClassDefinitionName,
SchemaDefinition,
)
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
def default_template(pydantic_ver: str = "2") -> str:
"""Constructs a default template for pydantic classes based on the version of pydantic"""
### HEADER ###
template = """
{#-
Jinja2 Template for a pydantic classes
-#}
from __future__ import annotations
from datetime import datetime, date
from enum import Enum
from typing import List, Dict, Optional, Any, Union
from sqlmodel import SQLModel, Field
from nptyping import NDArray, Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
import sys
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
{% for import_module, import_classes in imports.items() %}
from {{ import_module }} import (
{{ import_classes | join(',\n ') }}
)
{% endfor %}
metamodel_version = "{{metamodel_version}}"
version = "{{version if version else None}}"
"""
### BASE MODEL ###
if pydantic_ver == "1":
template += """
class WeakRefShimBaseModel(BaseModel):
__slots__ = '__weakref__'
class ConfiguredBaseModel(WeakRefShimBaseModel,
validate_assignment = True,
validate_all = True,
underscore_attrs_are_private = True,
extra = {% if allow_extra %}'allow'{% else %}'forbid'{% endif %},
arbitrary_types_allowed = True,
use_enum_values = True):
pass
"""
else:
template += """
class ConfiguredBaseModel(BaseModel,
validate_assignment = True,
validate_default = True,
extra = {% if allow_extra %}'allow'{% else %}'forbid'{% endif %},
arbitrary_types_allowed = True,
use_enum_values = True):
pass
"""
### ENUMS ###
template += """
{% for e in enums.values() %}
class {{ e.name }}(str, Enum):
{% if e.description -%}
\"\"\"
{{ e.description }}
\"\"\"
{%- endif %}
{% for _, pv in e['values'].items() -%}
{% if pv.description -%}
# {{pv.description}}
{%- endif %}
{{pv.label}} = "{{pv.value}}"
{% endfor %}
{% if not e['values'] -%}
dummy = "dummy"
{% endif %}
{% endfor %}
"""
### CLASSES ###
template += """
{%- for c in schema.classes.values() %}
class {{ c.name }}
{%- if class_isa_plus_mixins[c.name] -%}
({{class_isa_plus_mixins[c.name]|join(', ')}}, table=True)
{%- else -%}
(ConfiguredBaseModel, table=True)
{%- endif -%}
:
{% if c.description -%}
\"\"\"
{{ c.description }}
\"\"\"
{%- endif %}
{% for attr in c.attributes.values() if c.attributes -%}
{{attr.name}}: {{ attr.annotations['python_range'].value }} = Field(
{%- if predefined_slot_values[c.name][attr.name] -%}
{{ predefined_slot_values[c.name][attr.name] }}
{%- if attr.equals_string -%}
, const=True
{%- endif -%}
{%- elif attr.required -%}
...
{%- else -%}
None
{%- endif -%}
{%- if attr.title != None %}, title="{{attr.title}}"{% endif -%}
{%- if attr.description %}, description=\"\"\"{{attr.description}}\"\"\"{% endif -%}
{%- if attr.minimum_value != None %}, ge={{attr.minimum_value}}{% endif -%}
{%- if attr.maximum_value != None %}, le={{attr.maximum_value}}{% endif -%}
{%- if 'foreign_key' in s.annotations -%}, foreign_key='{{ s.annotations['foreign_key'].value }}' {%- endif -%}
{%- if 'primary_key' in s.annotations -%}, primary_key=True {%- endif -%}
)
{% else -%}
None
{% endfor %}
{% endfor %}
"""
### FWD REFS / REBUILD MODEL ###
if pydantic_ver == "1":
template += """
# Update forward refs
# see https://pydantic-docs.helpmanual.io/usage/postponed_annotations/
{% for c in schema.classes.values() -%}
{{ c.name }}.update_forward_refs()
{% endfor %}
"""
else:
template += """
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
{% for c in schema.classes.values() -%}
{{ c.name }}.model_rebuild()
{% endfor %}
"""
return template
class SQLModelGenerator(NWBPydanticGenerator):
"""
Generate an SQLModels-compatible model
"""
def generate_sqla(
self,
foreign_key_policy: ForeignKeyPolicy = None,
**kwargs
):
"""
Adapted from :meth:`linkml.generators.sqlalchemygen.SQLAlchemyGenerator.generate_sqla`
Need to add SQL annotations to pydantic before passing to
the template, but original classes don't return generated values
- Accept as arguments:
-
Returns:
- mappings=tr_result.mappings
- backrefs=backrefs
- is_join_table
"""
sqltr = RelationalModelTransformer(self.schemaview)
tr_result = sqltr.transform(**kwargs)
tgen = SQLTableGenerator(self.schemaview.schema)
tr_schema = tr_result.schema
pdb.set_trace()
for c in tr_schema.classes.values():
for a in c.attributes.values():
sql_range = tgen.get_sql_range(a, tr_schema)
sql_type = sql_range.__repr__()
ann = Annotation("sql_type", sql_type)
a.annotations[ann.tag] = ann
backrefs = defaultdict(list)
for m in tr_result.mappings:
backrefs[m.source_class].append(m)
tr_sv = SchemaView(tr_schema)
rel_schema_classes_ordered = [
tr_sv.get_class(cn, strict=True) for cn in tr_sv.all_classes()
]
for c in rel_schema_classes_ordered:
# For SQLA there needs to be a primary key for each class;
# autogenerate this as a compound key if none declared
has_pk = any(a for a in c.attributes.values() if "primary_key" in a.annotations)
if not has_pk:
for a in c.attributes.values():
ann = Annotation("primary_key", "true")
a.annotations[ann.tag] = ann
return tr_sv, backrefs
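Taken together, `generate_sqla` returns the relationally-transformed `SchemaView` (with `sql_type`, `primary_key`, and foreign-key annotations applied) plus the per-class backref mappings, and the SQLModel-flavored template renders `table=True` classes from it. A hedged sketch of how the pieces might be driven; the module path and schema path are assumptions, and the `pdb.set_trace()` left in above will pause execution when it runs:

```python
# Sketch under assumptions: the module and schema paths are placeholders, not from this commit.
from nwb_linkml.generators.sqlmodel import SQLModelGenerator  # assumed module name

gen = SQLModelGenerator("path/to/namespace.yaml")   # hypothetical schema path
tr_sv, backrefs = gen.generate_sqla()               # transformed SchemaView + per-class backref mappings
sqlmodel_code = gen.serialize(schemaview=tr_sv)     # render SQLModel classes against the transformed view
```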

View file

@@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import \
TypeDefinition,\
Prefix,\
PermissibleValue
from nwb_linkml.maps.dtype import flat_to_linkml
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_python
FlatDType = EnumDefinition(
@@ -57,7 +57,8 @@ for nwbtype, linkmltype in flat_to_linkml.items():
atype = TypeDefinition(
name=nwbtype,
minimum_value=amin,
typeof=linkmltype
typeof=linkmltype,
base=flat_to_python[nwbtype]
)
DTypeTypes.append(atype)
@@ -89,6 +90,10 @@ NwbLangSchema = SchemaDefinition(
classes=[Arraylike, AnyType],
types=DTypeTypes,
imports=['linkml:types'],
prefixes={'linkml': Prefix('linkml','https://w3id.org/linkml')}
prefixes={
'linkml': Prefix('linkml','https://w3id.org/linkml'),
'nwb.language': f'https://example.com/nwb.language/'},
default_prefix='nwb.language',
)
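With the new `base=flat_to_python[nwbtype]`, each generated `TypeDefinition` carries a concrete base type alongside its `typeof`, and the schema now gets an `nwb.language` default prefix. A minimal sketch of one such type, assuming a hypothetical dtype key (the commented values are illustrative, not read from the maps):

```python
# Sketch only; the dtype key and the commented example values are assumptions.
from linkml_runtime.linkml_model import TypeDefinition
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_python

nwbtype = "int32"  # hypothetical dtype key
atype = TypeDefinition(
    name=nwbtype,
    typeof=flat_to_linkml[nwbtype],  # LinkML type this aliases, e.g. "integer"
    base=flat_to_python[nwbtype],    # base/python representation, e.g. "int"
)
```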

View file

@@ -56,4 +56,31 @@ flat_to_npytyping = {
"bool": "Bool",
"isodatetime": "Datetime64",
'AnyType': 'Any'
}
flat_to_python = {
"float" : "float",
"float32" : "float",
"double" : "double",
"float64" : "double",
"long" : "int",
"int64" : "int",
"int" : "int",
"int32" : "int",
"int16" : "int",
"short" : "int",
"int8" : "int",
"uint" : "int",
"uint32" : "int",
"uint16" : "int",
"uint8" : "int",
"uint64" : "int",
"numeric" : "float",
"text" : "string",
"utf" : "string",
"utf8" : "string",
"utf_8" : "string",
"ascii" : "string",
"bool" : "boolean",
"isodatetime" : "datetime"
}
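`flat_to_python` is keyed by the same NWB flat dtypes as the existing `flat_to_npytyping` map, but targets LinkML/python base names rather than nptyping class names. A small lookup sketch (the dtype key is just an example assumed to exist in both maps):

```python
# Sketch only; "float32" is an example key, not a value checked against this commit.
from nwb_linkml.maps.dtype import flat_to_npytyping, flat_to_python

dtype = "float32"
print(flat_to_npytyping[dtype])  # nptyping class name used for array annotations
print(flat_to_python[dtype])     # LinkML/python base name, per the new map above
```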

View file

View file

@@ -11,9 +11,13 @@ else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTable,
VectorData
from .core_nwb_base_include import (
ImageArray,
TimeSeriesStartingTime,
ImageReferencesArray,
ImagesOrderOfImages,
TimeSeriesData,
TimeSeriesSync
)
from .hdmf_common_base import (
@@ -21,13 +25,9 @@ from .hdmf_common_base import (
Container
)
from .core_nwb_base_include import (
TimeSeriesStartingTime,
ImageArray,
ImageReferencesArray,
TimeSeriesSync,
ImagesOrderOfImages,
TimeSeriesData
from .hdmf_common_table import (
VectorData,
DynamicTable
)

View file

@@ -11,6 +11,10 @@ else:
from typing_extensions import Literal
from .core_nwb_misc import (
IntervalSeries
)
from .core_nwb_base import (
TimeSeries,
NWBDataInterface
@@ -20,10 +24,6 @@ from .core_nwb_behavior_include import (
SpatialSeriesData
)
from .core_nwb_misc import (
IntervalSeries
)
metamodel_version = "None"
version = "None"
@@ -43,13 +43,13 @@ class SpatialSeries(TimeSeries):
"""
name: str = Field(...)
data: SpatialSeriesData = Field(..., description="""1-D or 2-D array storing position or direction relative to some reference frame.""")
reference_frame: Optional[str] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
reference_frame: Optional[string] = Field(None, description="""Description defining what exactly 'straight-ahead' means.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")

View file

@@ -11,20 +11,20 @@ else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeries,
NWBContainer,
NWBDataInterface
from .core_nwb_ecephys_include import (
FeatureExtractionFeatures,
SpikeEventSeriesData,
FeatureExtractionElectrodes,
ClusterWaveformsWaveformMean,
ClusterWaveformsWaveformSd,
ElectricalSeriesElectrodes,
ElectricalSeriesData
)
from .core_nwb_ecephys_include import (
FeatureExtractionElectrodes,
ClusterWaveformsWaveformSd,
ClusterWaveformsWaveformMean,
SpikeEventSeriesData,
ElectricalSeriesElectrodes,
ElectricalSeriesData,
FeatureExtractionFeatures
from .core_nwb_base import (
NWBDataInterface,
NWBContainer,
TimeSeries
)

View file

@@ -11,14 +11,14 @@ else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTableRegion
)
from .nwb_language import (
Arraylike
)
from .hdmf_common_table import (
DynamicTableRegion
)
metamodel_version = "None"
version = "None"

View file

@@ -16,9 +16,9 @@ from .hdmf_common_table import (
)
from .core_nwb_epoch_include import (
TimeIntervalsTimeseriesIndex,
TimeIntervalsTagsIndex,
TimeIntervalsTimeseries,
TimeIntervalsTagsIndex
TimeIntervalsTimeseriesIndex
)

View file

@@ -11,14 +11,14 @@ else:
from typing_extensions import Literal
from .hdmf_common_table import (
VectorIndex
)
from .core_nwb_base import (
TimeSeriesReferenceVectorData
)
from .hdmf_common_table import (
VectorIndex
)
metamodel_version = "None"
version = "None"

View file

@@ -11,21 +11,10 @@ else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTable
)
from .core_nwb_base import (
NWBData,
ProcessingModule,
NWBDataInterface,
NWBContainer
)
from .core_nwb_file_include import (
NWBFileGeneral,
SubjectAge,
NWBFileIntervals,
SubjectAge,
NWBFileStimulus
)
@@ -33,6 +22,17 @@ from .core_nwb_misc import (
Units
)
from .core_nwb_base import (
NWBDataInterface,
ProcessingModule,
NWBContainer,
NWBData
)
from .hdmf_common_table import (
DynamicTable
)
metamodel_version = "None"
version = "None"

View file

@@ -11,36 +11,40 @@ else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeries,
Images
)
from .core_nwb_icephys import (
ExperimentalConditionsTable,
SweepTable,
IntracellularElectrode,
SequentialRecordingsTable,
RepetitionsTable,
SimultaneousRecordingsTable,
IntracellularRecordingsTable
SequentialRecordingsTable,
SweepTable,
RepetitionsTable,
IntracellularElectrode,
IntracellularRecordingsTable,
ExperimentalConditionsTable
)
from .core_nwb_ogen import (
OptogeneticStimulusSite
from .core_nwb_file import (
Subject,
LabMetaData
)
from .hdmf_common_table import (
DynamicTable
)
from .core_nwb_epoch import (
TimeIntervals
)
from .core_nwb_file import (
LabMetaData,
Subject
from .core_nwb_ogen import (
OptogeneticStimulusSite
)
from .hdmf_common_table import (
DynamicTable
from .core_nwb_ophys import (
ImagingPlane
)
from .core_nwb_base import (
Images,
TimeSeries
)
from .core_nwb_device import (
@@ -51,10 +55,6 @@ from .core_nwb_ecephys import (
ElectrodeGroup
)
from .core_nwb_ophys import (
ImagingPlane
)
metamodel_version = "None"
version = "None"

View file

@@ -11,39 +11,39 @@ else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeries,
NWBContainer
)
from .core_nwb_icephys_include import (
VoltageClampSeriesCapacitanceSlow,
ExperimentalConditionsTableRepetitions,
VoltageClampStimulusSeriesData,
IntracellularResponsesTableResponse,
ExperimentalConditionsTableRepetitionsIndex,
VoltageClampSeriesResistanceCompPrediction,
SimultaneousRecordingsTableRecordingsIndex,
CurrentClampStimulusSeriesData,
VoltageClampSeriesWholeCellSeriesResistanceComp,
VoltageClampStimulusSeriesData,
VoltageClampSeriesCapacitanceSlow,
SimultaneousRecordingsTableRecordings,
VoltageClampSeriesResistanceCompPrediction,
SequentialRecordingsTableSimultaneousRecordings,
VoltageClampSeriesCapacitanceFast,
RepetitionsTableSequentialRecordingsIndex,
IntracellularStimuliTableStimulus,
VoltageClampSeriesResistanceCompCorrection,
SequentialRecordingsTableSimultaneousRecordingsIndex,
SimultaneousRecordingsTableRecordings,
IntracellularResponsesTableResponse,
RepetitionsTableSequentialRecordingsIndex,
VoltageClampSeriesResistanceCompBandwidth,
CurrentClampSeriesData,
SimultaneousRecordingsTableRecordingsIndex,
VoltageClampSeriesData,
SequentialRecordingsTableSimultaneousRecordingsIndex,
ExperimentalConditionsTableRepetitions,
RepetitionsTableSequentialRecordings,
VoltageClampSeriesWholeCellCapacitanceComp,
CurrentClampStimulusSeriesData,
CurrentClampSeriesData,
VoltageClampSeriesResistanceCompCorrection,
SweepTableSeriesIndex
)
from .core_nwb_base import (
NWBContainer,
TimeSeries
)
from .hdmf_common_table import (
DynamicTable,
AlignedDynamicTable
AlignedDynamicTable,
DynamicTable
)
@@ -64,16 +64,16 @@ class PatchClampSeries(TimeSeries):
An abstract base class for patch-clamp data - stimulus or response, current or voltage.
"""
name: str = Field(...)
stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
stimulus_description: Optional[string] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
data: List[float] = Field(default_factory=list, description="""Recorded voltage or current.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -86,15 +86,15 @@ class CurrentClampSeries(PatchClampSeries):
bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
capacitance_compensation: Optional[float] = Field(None, description="""Capacitance compensation, in farads.""")
stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
stimulus_description: Optional[string] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -103,19 +103,19 @@ class IZeroClampSeries(CurrentClampSeries):
Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell.
"""
name: str = Field(...)
stimulus_description: Optional[str] = Field(None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""")
stimulus_description: Optional[string] = Field(None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""")
bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
capacitance_compensation: float = Field(..., description="""Capacitance compensation, in farads, fixed to 0.0.""")
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -125,15 +125,15 @@ class CurrentClampStimulusSeries(PatchClampSeries):
"""
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
stimulus_description: Optional[string] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -150,15 +150,15 @@ class VoltageClampSeries(PatchClampSeries):
resistance_comp_prediction: Optional[VoltageClampSeriesResistanceCompPrediction] = Field(None, description="""Resistance compensation prediction, in percent.""")
whole_cell_capacitance_comp: Optional[VoltageClampSeriesWholeCellCapacitanceComp] = Field(None, description="""Whole cell capacitance compensation, in farads.""")
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = Field(None, description="""Whole cell series resistance compensation, in ohms.""")
stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
stimulus_description: Optional[string] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -168,15 +168,15 @@ class VoltageClampStimulusSeries(PatchClampSeries):
"""
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
stimulus_description: Optional[str] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[int] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
stimulus_description: Optional[string] = Field(None, description="""Protocol/stimulus name for this patch-clamp dataset.""")
sweep_number: Optional[integer] = Field(None, description="""Sweep number, allows to group different PatchClampSeries together.""")
gain: Optional[float] = Field(None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@@ -185,14 +185,14 @@ class IntracellularElectrode(NWBContainer):
An intracellular electrode and its metadata.
"""
name: str = Field(...)
cell_id: Optional[str] = Field(None, description="""unique ID of the cell""")
description: str = Field(..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""")
filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""")
initial_access_resistance: Optional[str] = Field(None, description="""Initial access resistance.""")
location: Optional[str] = Field(None, description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[str] = Field(None, description="""Information about seal used for recording.""")
slice: Optional[str] = Field(None, description="""Information about slice used for recording.""")
cell_id: Optional[string] = Field(None, description="""unique ID of the cell""")
description: string = Field(..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""")
filtering: Optional[string] = Field(None, description="""Electrode specific filtering.""")
initial_access_resistance: Optional[string] = Field(None, description="""Initial access resistance.""")
location: Optional[string] = Field(None, description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""")
resistance: Optional[string] = Field(None, description="""Electrode resistance, in ohms.""")
seal: Optional[string] = Field(None, description="""Information about seal used for recording.""")
slice: Optional[string] = Field(None, description="""Information about slice used for recording.""")
class SweepTable(DynamicTable):
@@ -200,12 +200,12 @@ class SweepTable(DynamicTable):
[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata.
"""
name: str = Field(...)
sweep_number: Optional[List[int]] = Field(default_factory=list, description="""Sweep number of the PatchClampSeries in that row.""")
sweep_number: Optional[List[integer]] = Field(default_factory=list, description="""Sweep number of the PatchClampSeries in that row.""")
series: Optional[List[PatchClampSeries]] = Field(default_factory=list, description="""The PatchClampSeries with the sweep number in that row.""")
series_index: SweepTableSeriesIndex = Field(..., description="""Index for series.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -214,10 +214,10 @@ class IntracellularElectrodesTable(DynamicTable):
Table for storing intracellular electrode related metadata.
"""
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
electrode: Optional[List[IntracellularElectrode]] = Field(default_factory=list, description="""Column for storing the reference to the intracellular electrode.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -226,10 +226,10 @@ class IntracellularStimuliTable(DynamicTable):
Table for storing intracellular stimulus related metadata.
"""
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
stimulus: IntracellularStimuliTableStimulus = Field(..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -238,10 +238,10 @@ class IntracellularResponsesTable(DynamicTable):
Table for storing intracellular response related metadata.
"""
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
response: IntracellularResponsesTableResponse = Field(..., description="""Column storing the reference to the recorded response for the recording (rows)""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -250,14 +250,14 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.
"""
name: str = Field("intracellular_recordings", const=True)
description: Optional[str] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""")
description: Optional[string] = Field(None, description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""")
electrodes: IntracellularElectrodesTable = Field(..., description="""Table for storing intracellular electrode related metadata.""")
stimuli: IntracellularStimuliTable = Field(..., description="""Table for storing intracellular stimulus related metadata.""")
responses: IntracellularResponsesTable = Field(..., description="""Table for storing intracellular response related metadata.""")
categories: Optional[str] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
categories: Optional[string] = Field(None, description="""The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.""")
dynamic_table: Optional[List[DynamicTable]] = Field(default_factory=list, description="""A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -268,9 +268,9 @@ class SimultaneousRecordingsTable(DynamicTable):
name: str = Field("simultaneous_recordings", const=True)
recordings: SimultaneousRecordingsTableRecordings = Field(..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""")
recordings_index: SimultaneousRecordingsTableRecordingsIndex = Field(..., description="""Index dataset for the recordings column.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@@ -281,10 +281,10 @@ class SequentialRecordingsTable(DynamicTable):
name: str = Field("sequential_recordings", const=True)
simultaneous_recordings: SequentialRecordingsTableSimultaneousRecordings = Field(..., description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""")
simultaneous_recordings_index: SequentialRecordingsTableSimultaneousRecordingsIndex = Field(..., description="""Index dataset for the simultaneous_recordings column.""")
stimulus_type: Optional[List[str]] = Field(default_factory=list, description="""The type of stimulus used for the sequential recording.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
stimulus_type: Optional[List[string]] = Field(default_factory=list, description="""The type of stimulus used for the sequential recording.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@ -295,9 +295,9 @@ class RepetitionsTable(DynamicTable):
name: str = Field("repetitions", const=True)
sequential_recordings: RepetitionsTableSequentialRecordings = Field(..., description="""A reference to one or more rows in the SequentialRecordingsTable table.""")
sequential_recordings_index: RepetitionsTableSequentialRecordingsIndex = Field(..., description="""Index dataset for the sequential_recordings column.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")
@ -308,9 +308,9 @@ class ExperimentalConditionsTable(DynamicTable):
name: str = Field("experimental_conditions", const=True)
repetitions: ExperimentalConditionsTableRepetitions = Field(..., description="""A reference to one or more rows in the RepetitionsTable table.""")
repetitions_index: ExperimentalConditionsTableRepetitionsIndex = Field(..., description="""Index dataset for the repetitions column.""")
colnames: Optional[str] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[str] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[int] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
colnames: Optional[string] = Field(None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""")
description: Optional[string] = Field(None, description="""Description of what is in this dynamic table.""")
id: List[integer] = Field(default_factory=list, description="""Array of unique identifiers for the rows of this dynamic table.""")
VectorData: Optional[List[VectorData]] = Field(default_factory=list, description="""Vector columns, including index columns, of this dynamic table.""")

View file

@ -11,16 +11,16 @@ else:
from typing_extensions import Literal
from .hdmf_common_table import (
DynamicTableRegion,
VectorIndex
)
from .core_nwb_icephys import (
IntracellularRecordingsTable,
SequentialRecordingsTable,
RepetitionsTable,
SimultaneousRecordingsTable,
IntracellularRecordingsTable
SimultaneousRecordingsTable
)
from .hdmf_common_table import (
VectorIndex,
DynamicTableRegion
)
from .core_nwb_base import (
@ -45,7 +45,7 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
Recorded voltage.
"""
name: str = Field("data", const=True)
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
unit: Optional[string] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
@ -53,7 +53,7 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
Stimulus current applied.
"""
name: str = Field("data", const=True)
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
unit: Optional[string] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class VoltageClampSeriesData(ConfiguredBaseModel):
@ -61,7 +61,7 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
Recorded current.
"""
name: str = Field("data", const=True)
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
unit: Optional[string] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
@ -69,7 +69,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
Fast capacitance, in farads.
"""
name: str = Field("capacitance_fast", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@ -77,7 +77,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
Slow capacitance, in farads.
"""
name: str = Field("capacitance_slow", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""")
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@ -85,7 +85,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
Resistance compensation bandwidth, in hertz.
"""
name: str = Field("resistance_comp_bandwidth", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""")
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@ -93,7 +93,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
Resistance compensation correction, in percent.
"""
name: str = Field("resistance_comp_correction", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""")
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@ -101,7 +101,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
Resistance compensation prediction, in percent.
"""
name: str = Field("resistance_comp_prediction", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""")
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@ -109,7 +109,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
Whole cell capacitance compensation, in farads.
"""
name: str = Field("whole_cell_capacitance_comp", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""")
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@ -117,7 +117,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
Whole cell series resistance compensation, in ohms.
"""
name: str = Field("whole_cell_series_resistance_comp", const=True)
unit: Optional[str] = Field(None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""")
unit: Optional[string] = Field(None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""")
class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
@ -125,7 +125,7 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
Stimulus voltage applied.
"""
name: str = Field("data", const=True)
unit: Optional[str] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
unit: Optional[string] = Field(None, description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""")
class SweepTableSeriesIndex(VectorIndex):
@ -134,7 +134,7 @@ class SweepTableSeriesIndex(VectorIndex):
"""
name: str = Field("series_index", const=True)
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -148,7 +148,7 @@ class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData):
Column storing the reference to the recorded stimulus for the recording (rows).
"""
name: str = Field("stimulus", const=True)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -162,7 +162,7 @@ class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData):
Column storing the reference to the recorded response for the recording (rows)
"""
name: str = Field("response", const=True)
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -177,7 +177,7 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
"""
name: str = Field("recordings", const=True)
table: Optional[IntracellularRecordingsTable] = Field(None, description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
description: Optional[string] = Field(None, description="""Description of what this table region points to.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -192,7 +192,7 @@ class SimultaneousRecordingsTableRecordingsIndex(VectorIndex):
"""
name: str = Field("recordings_index", const=True)
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -207,7 +207,7 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
"""
name: str = Field("simultaneous_recordings", const=True)
table: Optional[SimultaneousRecordingsTable] = Field(None, description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
description: Optional[string] = Field(None, description="""Description of what this table region points to.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -222,7 +222,7 @@ class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex):
"""
name: str = Field("simultaneous_recordings_index", const=True)
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -237,7 +237,7 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion):
"""
name: str = Field("sequential_recordings", const=True)
table: Optional[SequentialRecordingsTable] = Field(None, description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
description: Optional[string] = Field(None, description="""Description of what this table region points to.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -252,7 +252,7 @@ class RepetitionsTableSequentialRecordingsIndex(VectorIndex):
"""
name: str = Field("sequential_recordings_index", const=True)
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -267,7 +267,7 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
"""
name: str = Field("repetitions", const=True)
table: Optional[RepetitionsTable] = Field(None, description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""")
description: Optional[str] = Field(None, description="""Description of what this table region points to.""")
description: Optional[string] = Field(None, description="""Description of what this table region points to.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@ -282,7 +282,7 @@ class ExperimentalConditionsTableRepetitionsIndex(VectorIndex):
"""
name: str = Field("repetitions_index", const=True)
target: Optional[VectorData] = Field(None, description="""Reference to the target dataset that this index applies to.""")
description: Optional[str] = Field(None, description="""Description of what these vectors represent.""")
description: Optional[string] = Field(None, description="""Description of what these vectors represent.""")
array: Optional[Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],

View file

@ -11,20 +11,20 @@ else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeries,
Image
)
from .core_nwb_image_include import (
ImageSeriesData,
RGBAImageArray,
GrayscaleImageArray,
RGBImageArray,
ImageSeriesData,
OpticalSeriesFieldOfView,
RGBAImageArray,
OpticalSeriesData
)
from .core_nwb_base import (
Image,
TimeSeries
)
metamodel_version = "None"
version = "None"
@ -45,7 +45,7 @@ class GrayscaleImage(Image):
name: str = Field(...)
array: Optional[NDArray[Shape["* x, * y"], Number]] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[string] = Field(None, description="""Description of the image.""")
class RGBImage(Image):
@ -55,7 +55,7 @@ class RGBImage(Image):
name: str = Field(...)
array: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], Number]] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[string] = Field(None, description="""Description of the image.""")
class RGBAImage(Image):
@ -65,7 +65,7 @@ class RGBAImage(Image):
name: str = Field(...)
array: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], Number]] = Field(None)
resolution: Optional[float] = Field(None, description="""Pixel resolution of the image, in pixels per centimeter.""")
description: Optional[str] = Field(None, description="""Description of the image.""")
description: Optional[string] = Field(None, description="""Description of the image.""")
class ImageSeries(TimeSeries):
@ -74,15 +74,15 @@ class ImageSeries(TimeSeries):
"""
name: str = Field(...)
data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
dimension: Optional[List[int]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[str]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
dimension: Optional[List[integer]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[string]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[string] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
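The format and external_file fields above describe a small contract: paths go in external_file only when format='external', and raw binary goes in data otherwise. A hedged sketch of a consistency check, a hypothetical helper that is not part of the generated model:

# Hypothetical helper illustrating the contract stated in the field
# descriptions above; the generated ImageSeries model does not include it.
from typing import List, Optional

def check_image_series_source(format: Optional[str], external_file: List[str]) -> None:
    if format == "external" and not external_file:
        raise ValueError("format='external' requires at least one path in external_file")
    if format in (None, "raw") and external_file:
        raise ValueError("external_file should only be set when format='external'")
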
@ -92,15 +92,15 @@ class ImageMaskSeries(ImageSeries):
"""
name: str = Field(...)
data: ImageSeriesData = Field(..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""")
dimension: Optional[List[int]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[str]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
dimension: Optional[List[integer]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[string]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[string] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@ -112,16 +112,16 @@ class OpticalSeries(ImageSeries):
distance: Optional[float] = Field(None, description="""Distance from camera/monitor to target/eye.""")
field_of_view: Optional[OpticalSeriesFieldOfView] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: OpticalSeriesData = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""")
dimension: Optional[List[int]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[str]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[str] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
orientation: Optional[string] = Field(None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""")
dimension: Optional[List[integer]] = Field(default_factory=list, description="""Number of pixels on x, y, (and z) axes.""")
external_file: Optional[List[string]] = Field(default_factory=list, description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""")
format: Optional[string] = Field(None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")
@ -130,13 +130,13 @@ class IndexSeries(TimeSeries):
Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
"""
name: str = Field(...)
data: List[int] = Field(default_factory=list, description="""Index of the image (using zero-indexing) in the linked Images object.""")
description: Optional[str] = Field(None, description="""Description of the time series.""")
comments: Optional[str] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
data: List[integer] = Field(default_factory=list, description="""Index of the image (using zero-indexing) in the linked Images object.""")
description: Optional[string] = Field(None, description="""Description of the time series.""")
comments: Optional[string] = Field(None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""")
starting_time: Optional[TimeSeriesStartingTime] = Field(None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""")
timestamps: Optional[List[float]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[int]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[str]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
timestamps: Optional[List[double]] = Field(default_factory=list, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""")
control: Optional[List[integer]] = Field(default_factory=list, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""")
control_description: Optional[List[string]] = Field(default_factory=list, description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""")
sync: Optional[TimeSeriesSync] = Field(None, description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""")

View file

@ -11,35 +11,35 @@ else:
from typing_extensions import Literal
from .core_nwb_misc_include import (
DecompositionSeriesSourceChannels,
UnitsWaveformSd,
AbstractFeatureSeriesData,
UnitsObsIntervalsIndex,
DecompositionSeriesData,
UnitsWaveformsIndexIndex,
UnitsElectrodes,
UnitsSpikeTimesIndex,
UnitsSpikeTimes,
UnitsObsIntervals,
UnitsElectrodesIndex,
UnitsWaveformMean,
UnitsWaveformsIndex,
UnitsWaveforms
)
from .hdmf_common_table import (
DynamicTable
)
from .core_nwb_misc_include import (
UnitsElectrodes,
UnitsElectrodesIndex,
UnitsObsIntervalsIndex,
UnitsSpikeTimes,
UnitsSpikeTimesIndex,
UnitsWaveformSd,
UnitsWaveformMean,
UnitsWaveforms,
AbstractFeatureSeriesData,
UnitsWaveformsIndexIndex,
UnitsObsIntervals,
UnitsWaveformsIndex,
DecompositionSeriesData,
DecompositionSeriesSourceChannels
from .core_nwb_ecephys import (
ElectrodeGroup
)
from .core_nwb_base import (
TimeSeries
)
from .core_nwb_ecephys import (
ElectrodeGroup
)
metamodel_version = "None"
version = "None"

View file

@ -12,9 +12,9 @@ else:
from .hdmf_common_table import (
DynamicTableRegion,
VectorData,
VectorIndex
VectorIndex,
DynamicTableRegion
)
from .nwb_language import (

View file

@ -11,22 +11,16 @@ else:
from typing_extensions import Literal
from .core_nwb_base import (
TimeSeries,
NWBDataInterface,
NWBContainer
)
from .core_nwb_ophys_include import (
RoiResponseSeriesRois,
PlaneSegmentationImageMask,
TwoPhotonSeriesFieldOfView,
ImagingPlaneOriginCoords,
RoiResponseSeriesData,
PlaneSegmentationPixelMaskIndex,
PlaneSegmentationImageMask,
ImagingPlaneGridSpacing,
RoiResponseSeriesRois,
PlaneSegmentationVoxelMaskIndex,
ImagingPlaneManifold
RoiResponseSeriesData,
ImagingPlaneManifold,
ImagingPlaneGridSpacing,
PlaneSegmentationPixelMaskIndex
)
from .hdmf_common_table import (
@ -37,6 +31,12 @@ from .core_nwb_image import (
ImageSeries
)
from .core_nwb_base import (
NWBDataInterface,
NWBContainer,
TimeSeries
)
metamodel_version = "None"
version = "None"

View file

@ -12,9 +12,9 @@ else:
from .hdmf_common_table import (
DynamicTableRegion,
VectorData,
VectorIndex
VectorIndex,
DynamicTableRegion
)
from .nwb_language import (

View file

@ -12,13 +12,13 @@ else:
from .core_nwb_retinotopy_include import (
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis1PhaseMap,
ImagingRetinotopyVasculatureImage,
ImagingRetinotopySignMap,
ImagingRetinotopyFocalDepthImage,
ImagingRetinotopyAxis2PowerMap,
ImagingRetinotopyVasculatureImage,
ImagingRetinotopyAxis2PhaseMap,
ImagingRetinotopyFocalDepthImage
ImagingRetinotopyAxis1PowerMap,
ImagingRetinotopyAxis1PhaseMap
)
from .core_nwb_base import (

View file

@ -33,9 +33,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
Phase response to stimulus on the first measured axis.
"""
name: str = Field("axis_1_phase_map", const=True)
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
unit: Optional[string] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
@ -50,9 +50,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
"""
name: str = Field("axis_1_power_map", const=True)
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
unit: Optional[string] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
@ -67,9 +67,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
Phase response to stimulus on the second measured axis.
"""
name: str = Field("axis_2_phase_map", const=True)
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
unit: Optional[string] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
@ -84,9 +84,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.
"""
name: str = Field("axis_2_power_map", const=True)
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
unit: Optional[str] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
unit: Optional[string] = Field(None, description="""Unit that axis data is stored in (e.g., degrees).""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
@ -101,18 +101,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].
"""
name: str = Field("focal_depth_image", const=True)
bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""")
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
bits_per_pixel: Optional[integer] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
format: Optional[string] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], UInt16]] = Field(None)
class ImagingRetinotopyFocalDepthImageArray(Arraylike):
num_rows: int = Field(...)
num_cols: int = Field(...)
num_rows: integer = Field(...)
num_cols: integer = Field(...)
class ImagingRetinotopySignMap(ConfiguredBaseModel):
@ -120,7 +120,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
Sine of the angle between the direction of the gradient in axis_1 and axis_2.
"""
name: str = Field("sign_map", const=True)
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], Float32]] = Field(None)
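The sign map is described above as the sine of the angle between the gradient directions of the two phase maps. A sketch of how such a map could be derived with numpy, assuming equal-shaped 2-D phase arrays; the pipeline that actually produces this dataset may differ:

# Hypothetical derivation; sin(theta) between the two gradient directions is
# the normalized 2-D cross product of the gradients.
import numpy as np

def sign_map(axis_1_phase: np.ndarray, axis_2_phase: np.ndarray) -> np.ndarray:
    g1y, g1x = np.gradient(axis_1_phase)
    g2y, g2x = np.gradient(axis_2_phase)
    cross = g1x * g2y - g1y * g2x
    norm = np.hypot(g1x, g1y) * np.hypot(g2x, g2y)
    return cross / np.where(norm == 0, 1, norm)
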
@ -136,17 +136,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]
"""
name: str = Field("vasculature_image", const=True)
bits_per_pixel: Optional[int] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""")
dimension: Optional[int] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
bits_per_pixel: Optional[integer] = Field(None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""")
dimension: Optional[integer] = Field(None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""")
field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
format: Optional[str] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
format: Optional[string] = Field(None, description="""Format of image. Right now only 'raw' is supported.""")
array: Optional[NDArray[Shape["* num_rows, * num_cols"], UInt16]] = Field(None)
class ImagingRetinotopyVasculatureImageArray(Arraylike):
num_rows: int = Field(...)
num_cols: int = Field(...)
num_rows: integer = Field(...)
num_cols: integer = Field(...)

View file

@ -33,9 +33,9 @@ class CSRMatrix(Container):
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
name: str = Field(...)
shape: Optional[int] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
indices: List[int] = Field(default_factory=list, description="""The column indices.""")
indptr: List[int] = Field(default_factory=list, description="""The row index pointer.""")
shape: Optional[integer] = Field(None, description="""The shape (number of rows, number of columns) of this sparse matrix.""")
indices: List[integer] = Field(default_factory=list, description="""The column indices.""")
indptr: List[integer] = Field(default_factory=list, description="""The row index pointer.""")
data: List[Any] = Field(default_factory=list, description="""The non-zero values in the matrix.""")
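The CSRMatrix docstring above fully specifies how a row is recovered from the (indptr, indices, data) triple; a small worked example in plain Python, independent of the generated class:

# Column/value pairs for row i of a CSR matrix, per the docstring above.
def csr_row(i, indices, indptr, data):
    start, stop = indptr[i], indptr[i + 1]
    return list(zip(indices[start:stop], data[start:stop]))

# For the 2x3 matrix [[0, 5, 0], [7, 0, 9]]:
# indptr = [0, 1, 3]; indices = [1, 0, 2]; data = [5, 7, 9]
# csr_row(1, indices, indptr, data) -> [(0, 7), (2, 9)]
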

View file

@ -17,8 +17,8 @@ from .hdmf_common_base import (
)
from .hdmf_common_table_include import (
VectorDataArray,
ElementIdentifiersArray
ElementIdentifiersArray,
VectorDataArray
)

View file

View file

View file

@ -120,7 +120,7 @@ class GitRepo:
return False
# Check that the remote matches
if self.remote.strip('.git') != self.namespace.repository:
if self.remote.strip('.git') != str(self.namespace.repository):
warnings.warn(f'Repository exists, but has the wrong remote URL.\nExpected: {self.namespace.repository}\nGot:{self.remote.strip(".git")}')
return False
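The added str(...) cast suggests namespace.repository is no longer a plain string (for example a URL type), so the comparison needs an explicit conversion; that reading is inferred from this hunk alone. Separately, str.strip('.git') removes any leading or trailing '.', 'g', 'i', or 't' characters rather than the literal suffix, so a remote ending in those letters could be over-trimmed. A hedged sketch of a stricter comparison, not what this commit implements:

# Hypothetical alternative using removesuffix (Python 3.9+), which strips only
# the exact '.git' suffix.
def remotes_match(remote: str, repository: object) -> bool:
    return remote.removesuffix('.git') == str(repository).removesuffix('.git')
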

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.base.include
- core.nwb.base
default_prefix: core.nwb.base.include/
prefixes:
core.nwb.base:
prefix_prefix: core.nwb.base
prefix_reference: https://example.com/core.nwb.base/
default_prefix: core.nwb.base
classes:
Image__Array:
name: Image__Array
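Each generated schema now carries an explicit prefixes block plus a bare default_prefix in place of the old trailing-slash form. A sketch of how the declared prefix resolves once the schema is loaded with linkml_runtime; the file name here is illustrative:

# Hypothetical usage; adjust the path to wherever the generated YAML lives.
from linkml_runtime.utils.schemaview import SchemaView

sv = SchemaView("core.nwb.base.include.yaml")
prefix = sv.schema.prefixes["core.nwb.base"]
print(prefix.prefix_reference)   # https://example.com/core.nwb.base/
print(sv.schema.default_prefix)  # core.nwb.base
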

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.base.include
- core.nwb.base
default_prefix: core.nwb.base/
prefixes:
core.nwb.base:
prefix_prefix: core.nwb.base
prefix_reference: https://example.com/core.nwb.base/
default_prefix: core.nwb.base
classes:
NWBData:
name: NWBData

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.behavior.include
- core.nwb.behavior
default_prefix: core.nwb.behavior.include/
prefixes:
core.nwb.behavior:
prefix_prefix: core.nwb.behavior
prefix_reference: https://example.com/core.nwb.behavior/
default_prefix: core.nwb.behavior
classes:
SpatialSeries__data:
name: SpatialSeries__data

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.behavior.include
- core.nwb.behavior
default_prefix: core.nwb.behavior/
prefixes:
core.nwb.behavior:
prefix_prefix: core.nwb.behavior
prefix_reference: https://example.com/core.nwb.behavior/
default_prefix: core.nwb.behavior
classes:
SpatialSeries:
name: SpatialSeries

View file

@ -4,7 +4,11 @@ imports:
- core.nwb.base
- nwb.language
- core.nwb.device
default_prefix: core.nwb.device/
prefixes:
core.nwb.device:
prefix_prefix: core.nwb.device
prefix_reference: https://example.com/core.nwb.device/
default_prefix: core.nwb.device
classes:
Device:
name: Device

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.ecephys.include
- core.nwb.ecephys
default_prefix: core.nwb.ecephys.include/
prefixes:
core.nwb.ecephys:
prefix_prefix: core.nwb.ecephys
prefix_reference: https://example.com/core.nwb.ecephys/
default_prefix: core.nwb.ecephys
classes:
ElectricalSeries__data:
name: ElectricalSeries__data

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.ecephys.include
- core.nwb.ecephys
default_prefix: core.nwb.ecephys/
prefixes:
core.nwb.ecephys:
prefix_prefix: core.nwb.ecephys
prefix_reference: https://example.com/core.nwb.ecephys/
default_prefix: core.nwb.ecephys
classes:
ElectricalSeries:
name: ElectricalSeries

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.epoch.include
- core.nwb.epoch
default_prefix: core.nwb.epoch.include/
prefixes:
core.nwb.epoch:
prefix_prefix: core.nwb.epoch
prefix_reference: https://example.com/core.nwb.epoch/
default_prefix: core.nwb.epoch
classes:
TimeIntervals__tags_index:
name: TimeIntervals__tags_index

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.epoch.include
- core.nwb.epoch
default_prefix: core.nwb.epoch/
prefixes:
core.nwb.epoch:
prefix_prefix: core.nwb.epoch
prefix_reference: https://example.com/core.nwb.epoch/
default_prefix: core.nwb.epoch
classes:
TimeIntervals:
name: TimeIntervals

View file

@ -13,7 +13,11 @@ imports:
- nwb.language
- core.nwb.file.include
- core.nwb.file
default_prefix: core.nwb.file.include/
prefixes:
core.nwb.file:
prefix_prefix: core.nwb.file
prefix_reference: https://example.com/core.nwb.file/
default_prefix: core.nwb.file
classes:
NWBFile__stimulus:
name: NWBFile__stimulus

View file

@ -13,7 +13,11 @@ imports:
- nwb.language
- core.nwb.file.include
- core.nwb.file
default_prefix: core.nwb.file/
prefixes:
core.nwb.file:
prefix_prefix: core.nwb.file
prefix_reference: https://example.com/core.nwb.file/
default_prefix: core.nwb.file
classes:
ScratchData:
name: ScratchData

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.icephys.include
- core.nwb.icephys
default_prefix: core.nwb.icephys.include/
prefixes:
core.nwb.icephys:
prefix_prefix: core.nwb.icephys
prefix_reference: https://example.com/core.nwb.icephys/
default_prefix: core.nwb.icephys
classes:
CurrentClampSeries__data:
name: CurrentClampSeries__data

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.icephys.include
- core.nwb.icephys
default_prefix: core.nwb.icephys/
prefixes:
core.nwb.icephys:
prefix_prefix: core.nwb.icephys
prefix_reference: https://example.com/core.nwb.icephys/
default_prefix: core.nwb.icephys
classes:
PatchClampSeries:
name: PatchClampSeries

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.image.include
- core.nwb.image
default_prefix: core.nwb.image.include/
prefixes:
core.nwb.image:
prefix_prefix: core.nwb.image
prefix_reference: https://example.com/core.nwb.image/
default_prefix: core.nwb.image
classes:
GrayscaleImage__Array:
name: GrayscaleImage__Array

View file

@ -6,7 +6,11 @@ imports:
- nwb.language
- core.nwb.image.include
- core.nwb.image
default_prefix: core.nwb.image/
prefixes:
core.nwb.image:
prefix_prefix: core.nwb.image
prefix_reference: https://example.com/core.nwb.image/
default_prefix: core.nwb.image
classes:
GrayscaleImage:
name: GrayscaleImage

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.misc.include
- core.nwb.misc
default_prefix: core.nwb.misc.include/
prefixes:
core.nwb.misc:
prefix_prefix: core.nwb.misc
prefix_reference: https://example.com/core.nwb.misc/
default_prefix: core.nwb.misc
classes:
AbstractFeatureSeries__data:
name: AbstractFeatureSeries__data

View file

@ -7,7 +7,11 @@ imports:
- nwb.language
- core.nwb.misc.include
- core.nwb.misc
default_prefix: core.nwb.misc/
prefixes:
core.nwb.misc:
prefix_prefix: core.nwb.misc
prefix_reference: https://example.com/core.nwb.misc/
default_prefix: core.nwb.misc
classes:
AbstractFeatureSeries:
name: AbstractFeatureSeries

View file

@ -5,7 +5,11 @@ imports:
- core.nwb.device
- nwb.language
- core.nwb.ogen
default_prefix: core.nwb.ogen/
prefixes:
core.nwb.ogen:
prefix_prefix: core.nwb.ogen
prefix_reference: https://example.com/core.nwb.ogen/
default_prefix: core.nwb.ogen
classes:
OptogeneticSeries:
name: OptogeneticSeries

View file

@ -8,7 +8,11 @@ imports:
- nwb.language
- core.nwb.ophys.include
- core.nwb.ophys
default_prefix: core.nwb.ophys.include/
prefixes:
core.nwb.ophys:
prefix_prefix: core.nwb.ophys
prefix_reference: https://example.com/core.nwb.ophys/
default_prefix: core.nwb.ophys
classes:
TwoPhotonSeries__field_of_view:
name: TwoPhotonSeries__field_of_view

View file

@ -8,7 +8,11 @@ imports:
- nwb.language
- core.nwb.ophys.include
- core.nwb.ophys
default_prefix: core.nwb.ophys/
prefixes:
core.nwb.ophys:
prefix_prefix: core.nwb.ophys
prefix_reference: https://example.com/core.nwb.ophys/
default_prefix: core.nwb.ophys
classes:
OnePhotonSeries:
name: OnePhotonSeries

View file

@ -5,7 +5,11 @@ imports:
- nwb.language
- core.nwb.retinotopy.include
- core.nwb.retinotopy
default_prefix: core.nwb.retinotopy.include/
prefixes:
core.nwb.retinotopy:
prefix_prefix: core.nwb.retinotopy
prefix_reference: https://example.com/core.nwb.retinotopy/
default_prefix: core.nwb.retinotopy
classes:
ImagingRetinotopy__axis_1_phase_map:
name: ImagingRetinotopy__axis_1_phase_map


@@ -5,7 +5,11 @@ imports:
- nwb.language
- core.nwb.retinotopy.include
- core.nwb.retinotopy
default_prefix: core.nwb.retinotopy/
prefixes:
core.nwb.retinotopy:
prefix_prefix: core.nwb.retinotopy
prefix_reference: https://example.com/core.nwb.retinotopy/
default_prefix: core.nwb.retinotopy
classes:
ImagingRetinotopy:
name: ImagingRetinotopy


@@ -15,4 +15,8 @@ imports:
- core.nwb.ogen
- core.nwb.ophys
- core.nwb.retinotopy
default_prefix: core/
prefixes:
core:
prefix_prefix: core
prefix_reference: https://example.com/core/
default_prefix: core


@@ -3,7 +3,11 @@ id: hdmf-common.base
imports:
- nwb.language
- hdmf-common.base
default_prefix: hdmf-common.base/
prefixes:
hdmf-common.base:
prefix_prefix: hdmf-common.base
prefix_reference: https://example.com/hdmf-common.base/
default_prefix: hdmf-common.base
classes:
Data:
name: Data


@@ -4,7 +4,11 @@ imports:
- hdmf-common.base
- nwb.language
- hdmf-common.sparse
default_prefix: hdmf-common.sparse/
prefixes:
hdmf-common.sparse:
prefix_prefix: hdmf-common.sparse
prefix_reference: https://example.com/hdmf-common.sparse/
default_prefix: hdmf-common.sparse
classes:
CSRMatrix:
name: CSRMatrix


@@ -5,7 +5,11 @@ imports:
- nwb.language
- hdmf-common.table.include
- hdmf-common.table
default_prefix: hdmf-common.table.include/
prefixes:
hdmf-common.table:
prefix_prefix: hdmf-common.table
prefix_reference: https://example.com/hdmf-common.table/
default_prefix: hdmf-common.table
classes:
VectorData__Array:
name: VectorData__Array


@@ -5,7 +5,11 @@ imports:
- nwb.language
- hdmf-common.table.include
- hdmf-common.table
default_prefix: hdmf-common.table/
prefixes:
hdmf-common.table:
prefix_prefix: hdmf-common.table
prefix_reference: https://example.com/hdmf-common.table/
default_prefix: hdmf-common.table
classes:
VectorData:
name: VectorData


@@ -6,4 +6,8 @@ imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
default_prefix: hdmf-common/
prefixes:
hdmf-common:
prefix_prefix: hdmf-common
prefix_reference: https://example.com/hdmf-common/
default_prefix: hdmf-common


@@ -4,7 +4,11 @@ imports:
- hdmf-common.table
- nwb.language
- hdmf-experimental.experimental
default_prefix: hdmf-experimental.experimental/
prefixes:
hdmf-experimental.experimental:
prefix_prefix: hdmf-experimental.experimental
prefix_reference: https://example.com/hdmf-experimental.experimental/
default_prefix: hdmf-experimental.experimental
classes:
EnumData:
name: EnumData


@@ -4,7 +4,11 @@ imports:
- hdmf-common.base
- nwb.language
- hdmf-experimental.resources
default_prefix: hdmf-experimental.resources/
prefixes:
hdmf-experimental.resources:
prefix_prefix: hdmf-experimental.resources
prefix_reference: https://example.com/hdmf-experimental.resources/
default_prefix: hdmf-experimental.resources
classes:
HERD:
name: HERD


@@ -6,4 +6,8 @@ version: 0.5.0
imports:
- hdmf-experimental.experimental
- hdmf-experimental.resources
default_prefix: hdmf-experimental/
prefixes:
hdmf-experimental:
prefix_prefix: hdmf-experimental
prefix_reference: https://example.com/hdmf-experimental/
default_prefix: hdmf-experimental


@@ -7,85 +7,112 @@ prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
nwb.language:
prefix_prefix: nwb.language
prefix_reference: https://example.com/nwb.language/
default_prefix: nwb.language
types:
float:
name: float
typeof: float
base: float
float32:
name: float32
typeof: float
base: float
double:
name: double
typeof: double
base: double
float64:
name: float64
typeof: double
base: double
long:
name: long
typeof: integer
base: int
int64:
name: int64
typeof: integer
base: int
int:
name: int
typeof: integer
base: int
int32:
name: int32
typeof: integer
base: int
int16:
name: int16
typeof: integer
base: int
short:
name: short
typeof: integer
base: int
int8:
name: int8
typeof: integer
base: int
uint:
name: uint
typeof: integer
base: int
minimum_value: 0
uint32:
name: uint32
typeof: integer
base: int
minimum_value: 0
uint16:
name: uint16
typeof: integer
base: int
minimum_value: 0
uint8:
name: uint8
typeof: integer
base: int
minimum_value: 0
uint64:
name: uint64
typeof: integer
base: int
minimum_value: 0
numeric:
name: numeric
typeof: float
base: float
text:
name: text
typeof: string
base: string
utf:
name: utf
typeof: string
base: string
utf8:
name: utf8
typeof: string
base: string
utf_8:
name: utf_8
typeof: string
base: string
ascii:
name: ascii
typeof: string
base: string
bool:
name: bool
typeof: boolean
base: boolean
isodatetime:
name: isodatetime
typeof: datetime
base: datetime
enums:
FlatDType:
name: FlatDType
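
The nwb.language hunk above pairs each NWB dtype alias with a LinkML typeof and an explicit base. A quick, illustrative way to inspect the resulting mapping (the schema path is an assumption):

    from linkml_runtime.utils.schemaview import SchemaView

    sv = SchemaView("nwb.language.yaml")  # path is an assumption
    for name, t in sv.all_types().items():
        print(f"{name}: typeof={t.typeof} base={t.base}")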

poetry.lock (generated)

@@ -238,13 +238,13 @@ tests = ["coverage", "pytest"]
[[package]]
name = "dash"
version = "2.12.1"
version = "2.13.0"
description = "A Python framework for building reactive web-apps. Developed by Plotly."
optional = false
python-versions = ">=3.6"
files = [
{file = "dash-2.12.1-py3-none-any.whl", hash = "sha256:23fcde95e59e353c34712c8fa3e90e784a7247a9e5f6ef47e467add10b7e91ab"},
{file = "dash-2.12.1.tar.gz", hash = "sha256:c7d3dccafff2d041a371dcf5bbb2a1701a38ca178c12dce93e64207e3aecbaeb"},
{file = "dash-2.13.0-py3-none-any.whl", hash = "sha256:ca21f01f720652c7e2d16d04d4e27803c2b60c4c2a382e750c3d8d778c06e209"},
{file = "dash-2.13.0.tar.gz", hash = "sha256:07c192db694b9bb4c87d57b6da877413f2695bfcb1d5c51f08995de7dcdd1e92"},
]
[package.dependencies]
@@ -1035,13 +1035,13 @@ tenacity = ">=6.2.0"
[[package]]
name = "pluggy"
version = "1.2.0"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
{file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
{file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
{file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]
[package.extras]
@@ -1590,108 +1590,108 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rpds-py"
version = "0.9.2"
version = "0.10.0"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.8"
files = [
{file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"},
{file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"},
{file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"},
{file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"},
{file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"},
{file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"},
{file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"},
{file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"},
{file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"},
{file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"},
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"},
{file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"},
{file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"},
{file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"},
{file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"},
{file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"},
{file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"},
{file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"},
{file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"},
{file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"},
{file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"},
{file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"},
{file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"},
{file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"},
{file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"},
{file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"},
{file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"},
{file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"},
{file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"},
{file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"},
{file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"},
{file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"},
{file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"},
{file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"},
{file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"},
{file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"},
{file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"},
{file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"},
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"},
{file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"},
{file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"},
{file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"},
{file = "rpds_py-0.10.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c1e0e9916301e3b3d970814b1439ca59487f0616d30f36a44cead66ee1748c31"},
{file = "rpds_py-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ce8caa29ebbdcde67e5fd652c811d34bc01f249dbc0d61e5cc4db05ae79a83b"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad277f74b1c164f7248afa968700e410651eb858d7c160d109fb451dc45a2f09"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e1c68303ccf7fceb50fbab79064a2636119fd9aca121f28453709283dbca727"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:780fcb855be29153901c67fc9c5633d48aebef21b90aa72812fa181d731c6b00"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbd7b24d108509a1b9b6679fcc1166a7dd031dbef1f3c2c73788f42e3ebb3beb"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0700c2133ba203c4068aaecd6a59bda22e06a5e46255c9da23cbf68c6942215d"},
{file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576da63eae7809f375932bfcbca2cf20620a1915bf2fedce4b9cc8491eceefe3"},
{file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23750a9b8a329844ba1fe267ca456bb3184984da2880ed17ae641c5af8de3fef"},
{file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d08395595c42bcd82c3608762ce734504c6d025eef1c06f42326a6023a584186"},
{file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1d7b7b71bcb82d8713c7c2e9c5f061415598af5938666beded20d81fa23e7640"},
{file = "rpds_py-0.10.0-cp310-none-win32.whl", hash = "sha256:97f5811df21703446b42303475b8b855ee07d6ab6cdf8565eff115540624f25d"},
{file = "rpds_py-0.10.0-cp310-none-win_amd64.whl", hash = "sha256:cdbed8f21204398f47de39b0a9b180d7e571f02dfb18bf5f1b618e238454b685"},
{file = "rpds_py-0.10.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:7a3a3d3e4f1e3cd2a67b93a0b6ed0f2499e33f47cc568e3a0023e405abdc0ff1"},
{file = "rpds_py-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc72ae476732cdb7b2c1acb5af23b478b8a0d4b6fcf19b90dd150291e0d5b26b"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0583f69522732bdd79dca4cd3873e63a29acf4a299769c7541f2ca1e4dd4bc6"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8b9a7cd381970e64849070aca7c32d53ab7d96c66db6c2ef7aa23c6e803f514"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d292cabd7c8335bdd3237ded442480a249dbcdb4ddfac5218799364a01a0f5c"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6903cdca64f1e301af9be424798328c1fe3b4b14aede35f04510989fc72f012"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed57543c99249ab3a4586ddc8786529fbc33309e5e8a1351802a06ca2baf4c2"},
{file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15932ec5f224b0e35764dc156514533a4fca52dcfda0dfbe462a1a22b37efd59"},
{file = "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb2d59bc196e6d3b1827c7db06c1a898bfa0787c0574af398e65ccf2e97c0fbe"},
{file = "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f99d74ddf9d3b6126b509e81865f89bd1283e3fc1b568b68cd7bd9dfa15583d7"},
{file = "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f70bec8a14a692be6dbe7ce8aab303e88df891cbd4a39af091f90b6702e28055"},
{file = "rpds_py-0.10.0-cp311-none-win32.whl", hash = "sha256:5f7487be65b9c2c510819e744e375bd41b929a97e5915c4852a82fbb085df62c"},
{file = "rpds_py-0.10.0-cp311-none-win_amd64.whl", hash = "sha256:748e472345c3a82cfb462d0dff998a7bf43e621eed73374cb19f307e97e08a83"},
{file = "rpds_py-0.10.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:d4639111e73997567343df6551da9dd90d66aece1b9fc26c786d328439488103"},
{file = "rpds_py-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4760e1b02173f4155203054f77a5dc0b4078de7645c922b208d28e7eb99f3e2"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a6420a36975e0073acaeee44ead260c1f6ea56812cfc6c31ec00c1c48197173"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58fc4d66ee349a23dbf08c7e964120dc9027059566e29cf0ce6205d590ed7eca"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:063411228b852fb2ed7485cf91f8e7d30893e69b0acb207ec349db04cccc8225"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65af12f70355de29e1092f319f85a3467f4005e959ab65129cb697169ce94b86"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298e8b5d8087e0330aac211c85428c8761230ef46a1f2c516d6a2f67fb8803c5"},
{file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b9bf77008f2c55dabbd099fd3ac87009471d223a1c7ebea36873d39511b780a"},
{file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c7853f27195598e550fe089f78f0732c66ee1d1f0eaae8ad081589a5a2f5d4af"},
{file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:75dbfd41a61bc1fb0536bf7b1abf272dc115c53d4d77db770cd65d46d4520882"},
{file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b25136212a3d064a8f0b9ebbb6c57094c5229e0de76d15c79b76feff26aeb7b8"},
{file = "rpds_py-0.10.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:9affee8cb1ec453382c27eb9043378ab32f49cd4bc24a24275f5c39bf186c279"},
{file = "rpds_py-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d55528ef13af4b4e074d067977b1f61408602f53ae4537dccf42ba665c2c7bd"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7865df1fb564092bcf46dac61b5def25342faf6352e4bc0e61a286e3fa26a3d"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f5cc8c7bc99d2bbcd704cef165ca7d155cd6464c86cbda8339026a42d219397"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbae50d352e4717ffc22c566afc2d0da744380e87ed44a144508e3fb9114a3f4"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fccbf0cd3411719e4c9426755df90bf3449d9fc5a89f077f4a7f1abd4f70c910"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d10c431073dc6ebceed35ab22948a016cc2b5120963c13a41e38bdde4a7212"},
{file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1b401e8b9aece651512e62c431181e6e83048a651698a727ea0eb0699e9f9b74"},
{file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:7618a082c55cf038eede4a918c1001cc8a4411dfe508dc762659bcd48d8f4c6e"},
{file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b3226b246facae14909b465061ddcfa2dfeadb6a64f407f24300d42d69bcb1a1"},
{file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a8edd467551c1102dc0f5754ab55cd0703431cd3044edf8c8e7d9208d63fa453"},
{file = "rpds_py-0.10.0-cp38-none-win32.whl", hash = "sha256:71333c22f7cf5f0480b59a0aef21f652cf9bbaa9679ad261b405b65a57511d1e"},
{file = "rpds_py-0.10.0-cp38-none-win_amd64.whl", hash = "sha256:a8ab1adf04ae2d6d65835995218fd3f3eb644fe20655ca8ee233e2c7270ff53b"},
{file = "rpds_py-0.10.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:87c93b25d538c433fb053da6228c6290117ba53ff6a537c133b0f2087948a582"},
{file = "rpds_py-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7996aed3f65667c6dcc8302a69368435a87c2364079a066750a2eac75ea01e"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8856aa76839dc234d3469f1e270918ce6bec1d6a601eba928f45d68a15f04fc3"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00215f6a9058fbf84f9d47536902558eb61f180a6b2a0fa35338d06ceb9a2e5a"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23a059143c1393015c68936370cce11690f7294731904bdae47cc3e16d0b2474"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e5c26905aa651cc8c0ddc45e0e5dea2a1296f70bdc96af17aee9d0493280a17"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c651847545422c8131660704c58606d841e228ed576c8f1666d98b3d318f89da"},
{file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80992eb20755701753e30a6952a96aa58f353d12a65ad3c9d48a8da5ec4690cf"},
{file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ffcf18ad3edf1c170e27e88b10282a2c449aa0358659592462448d71b2000cfc"},
{file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:08e08ccf5b10badb7d0a5c84829b914c6e1e1f3a716fdb2bf294e2bd01562775"},
{file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7150b83b3e3ddaac81a8bb6a9b5f93117674a0e7a2b5a5b32ab31fdfea6df27f"},
{file = "rpds_py-0.10.0-cp39-none-win32.whl", hash = "sha256:3455ecc46ea443b5f7d9c2f946ce4017745e017b0d0f8b99c92564eff97e97f5"},
{file = "rpds_py-0.10.0-cp39-none-win_amd64.whl", hash = "sha256:afe6b5a04b2ab1aa89bad32ca47bf71358e7302a06fdfdad857389dca8fb5f04"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:b1cb078f54af0abd835ca76f93a3152565b73be0f056264da45117d0adf5e99c"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8e7e2b3577e97fa43c2c2b12a16139b2cedbd0770235d5179c0412b4794efd9b"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae46a50d235f1631d9ec4670503f7b30405103034830bc13df29fd947207f795"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f869e34d2326e417baee430ae998e91412cc8e7fdd83d979277a90a0e79a5b47"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d544a614055b131111bed6edfa1cb0fb082a7265761bcb03321f2dd7b5c6c48"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9c2f6ca9774c2c24bbf7b23086264e6b5fa178201450535ec0859739e6f78d"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2da4a8c6d465fde36cea7d54bf47b5cf089073452f0e47c8632ecb9dec23c07"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac00c41dd315d147b129976204839ca9de699d83519ff1272afbe4fb9d362d12"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0155c33af0676fc38e1107679be882077680ad1abb6303956b97259c3177e85e"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:db6585b600b2e76e98131e0ac0e5195759082b51687ad0c94505970c90718f4a"},
{file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:7b6975d3763d0952c111700c0634968419268e6bbc0b55fe71138987fa66f309"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6388e4e95a26717b94a05ced084e19da4d92aca883f392dffcf8e48c8e221a24"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:18f87baa20e02e9277ad8960cd89b63c79c05caf106f4c959a9595c43f2a34a5"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f05fc7d832e970047662b3440b190d24ea04f8d3c760e33e7163b67308c878"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:291c9ce3929a75b45ce8ddde2aa7694fc8449f2bc8f5bd93adf021efaae2d10b"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:861d25ae0985a1dd5297fee35f476b60c6029e2e6e19847d5b4d0a43a390b696"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:668d2b45d62c68c7a370ac3dce108ffda482b0a0f50abd8b4c604a813a59e08f"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344b89384c250ba6a4ce1786e04d01500e4dac0f4137ceebcaad12973c0ac0b3"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:885e023e73ce09b11b89ab91fc60f35d80878d2c19d6213a32b42ff36543c291"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:841128a22e6ac04070a0f84776d07e9c38c4dcce8e28792a95e45fc621605517"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:899b5e7e2d5a8bc92aa533c2d4e55e5ebba095c485568a5e4bedbc163421259a"},
{file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e7947d9a6264c727a556541b1630296bbd5d0a05068d21c38dde8e7a1c703ef0"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4992266817169997854f81df7f6db7bdcda1609972d8ffd6919252f09ec3c0f6"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:26d9fd624649a10e4610fab2bc820e215a184d193e47d0be7fe53c1c8f67f370"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0028eb0967942d0d2891eae700ae1a27b7fd18604cfcb16a1ef486a790fee99e"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9e7e493ded7042712a374471203dd43ae3fff5b81e3de1a0513fa241af9fd41"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d68a8e8a3a816629283faf82358d8c93fe5bd974dd2704152394a3de4cec22a"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6d5f061f6a2aa55790b9e64a23dfd87b6664ab56e24cd06c78eb43986cb260b"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c7c4266c1b61eb429e8aeb7d8ed6a3bfe6c890a1788b18dbec090c35c6b93fa"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80772e3bda6787510d9620bc0c7572be404a922f8ccdfd436bf6c3778119464c"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b98e75b21fc2ba5285aef8efaf34131d16af1c38df36bdca2f50634bea2d3060"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:d63787f289944cc4bde518ad2b5e70a4f0d6e2ce76324635359c74c113fd188f"},
{file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:872f3dcaa8bf2245944861d7311179d2c0c9b2aaa7d3b464d99a7c2e401f01fa"},
{file = "rpds_py-0.10.0.tar.gz", hash = "sha256:e36d7369363d2707d5f68950a64c4e025991eb0177db01ccb6aa6facae48b69f"},
]
[[package]]
@@ -1848,52 +1848,52 @@ pandas = ["pandas (>=1.3.5)"]
[[package]]
name = "sqlalchemy"
version = "2.0.20"
version = "2.0.11"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = "sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = "sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = "sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"},
{file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"},
{file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e9069faea64d3390d90d16e5b2bc0652d8eb979ccdfd555822d96bc8d93afda1"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8aea55b1754430449d43823c8c4da2d5c7621ccd1fcd4c36231417762542d4ef"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ccd20b5a4e3511c2f0c889b7b79a7462b6c6aa2c06d0f4943c27a552e35e091"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1dcfea87230e34d7d55f67959ed09d3e60e09b77c76996de151c32f1b780135"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a836f391d7dc1039f10d2ef58cdc6e271462d6898dacdae1bfabfc16ca295f2c"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25bbf89e6f171d37cf3a993dbeee18cb85abe37a421c40e78131bf339e48da9d"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-win32.whl", hash = "sha256:0624852aec618438a4cd7a53ce00835435588506e6f8fbd60deaf9ac109f7cd0"},
{file = "SQLAlchemy-2.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d7eab7d668f95a1a2ef443da17154834adf9c5ac742a5992d5ebecbdca7d943e"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa81761ff674d2e2d591fc88d31835d3ecf65bddb021a522f4eaaae831c584cf"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:21f447403a1bfeb832a7384c4ac742b7baab04460632c0335e020e8e2c741d4b"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4d8d96c0a7265de8496250a2c2d02593da5e5e85ea24b5c54c2db028d74cf8c"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c4c5834789f718315cb25d1b95d18fde91b72a1a158cdc515d7f6380c1f02a3"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f57965a9d5882efdea0a2c87ae2f6c7dbc14591dcd0639209b50eec2b3ec947e"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0dd98b0be54503afc4c74e947720c3196f96fb2546bfa54d911d5de313c5463c"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-win32.whl", hash = "sha256:eec40c522781a58839df6a2a7a2d9fbaa473419a3ab94633d61e00a8c0c768b7"},
{file = "SQLAlchemy-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:62835d8cd6713458c032466c38a43e56503e19ea6e54b0e73295c6ab281fc0b1"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:216b9c4dbeaa143a36c9249f9e5a0fd7fa6549a1a3f9de9a2d30104f7e35d8b9"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7710fd24bcf33abed7ab7673dbb38ad48f20555835ff8c77258f07de46a87"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:718c0a9f8509542d0674c15b01f362b2f10e8bc425db74444bda4e073e06e660"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2a5fb41db86f6d4892edcf30bd67418dd757eb0246242648e610fa2bca7533d4"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:352dcd93e5a0421eee59dbac0000f8f811203cf228334d85d77b3ef075707322"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-win32.whl", hash = "sha256:fb21777cc9205b94f51688cdcba0924bdecbeb23dcf81473ff8c5352211e6e38"},
{file = "SQLAlchemy-2.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9268d7417467e9fde5f4364c71ce490b18a4b83a6543b0d55d1f83fce42bda"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:125c41b3557179e9a514a1cfe2764433177ba6195b2264725ceaa7a2e8afcbde"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19a03413cf36e86674857e519936b9c9e52059ba9f6e2ab0ec75d9a458277cb"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e48d908695abe05435250e0a083416cc49bd5afd46bc16a7ec8725771aad8eac"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3728f7518aa70e5ce88fae4c68b5d7f25493f37d8d867e4a7d60905bd162cd0d"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1ab6ac214354957db83c72c65941af7e022d4c9324bdadc54d0266aa162a3828"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:abadc6bf6b2c0a0be4370513221563afdbac3901d29fcdb7faf23b4e1ed26068"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-win32.whl", hash = "sha256:78cbc8eba442c9b8dc2d90c43ac477f0ee27467617704cd82d741b2eb061afb2"},
{file = "SQLAlchemy-2.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:384fdde6bd628d1a882f04aa9a40aa6928840b02d595ff5bd08abeae4c25f867"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:737a70c994f5b34e437a6ca754957a7a0f6f76c59fa460fc59d1bd15b8f8cb32"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e53e4920cd5872280256ddf6ca843b5d1435e0302847992bcb90f84b744999f"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:409cc6cd15d4db5c5af2c4e2d3a2137815c31d065cea9a77dec92cbe7cfcf448"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71dd742e3146be6fdded0b95a4b779f7d81595760eab32b0f718089573d3b86"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d85ca17b070f7076ec2582324331cf3683c09146fd8bd2621e8d80d6c3a93bbf"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a785c30929a5d82f2fa1c60ec46d623d418b19981dc0c594da806d3901658e39"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-win32.whl", hash = "sha256:66f24708cebe5a4e900e221574b50e102908f60f539fea30f1922705c0e97744"},
{file = "SQLAlchemy-2.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:5a2f95901e6bbed27b4ad5d59ab3f970eda0ce0b9ede3a67b6f9a914149ed71b"},
{file = "SQLAlchemy-2.0.11-py3-none-any.whl", hash = "sha256:1d28e8278d943d9111d44720f92cc338282e956ed68849bfcee053c06bde4f39"},
{file = "SQLAlchemy-2.0.11.tar.gz", hash = "sha256:c3cbff7cced3c42dbe71448ce6bf4202b4a2d305e78dd77e3f280ba6cd245138"},
]
[package.dependencies]
@@ -1901,7 +1901,7 @@ greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or pl
typing-extensions = ">=4.2.0"
[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
@@ -1920,10 +1920,28 @@ postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "sqlmodel"
version = "0"
description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
optional = false
python-versions = "^3.7"
files = []
develop = false
[package.dependencies]
pydantic = "^2.1.1"
SQLAlchemy = ">=2.0.0,<=2.0.11"
[package.source]
type = "git"
url = "https://github.com/honglei/sqlmodel.git"
reference = "main"
resolved_reference = "4213c978fc17b6f57337c43295adffd75eb554e4"
[[package]]
name = "tenacity"
version = "8.2.3"
@@ -2158,4 +2176,4 @@ tests = ["pytest", "pytest-depends"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "b552d70308f5b757a624b3d0046e54af4297bfba994a8019f4b366cce2fbe06f"
content-hash = "b2c008f0aadb604723b4e4387f59bf189691c2e2552cd86ed77990f08a3e626c"


@@ -23,6 +23,7 @@ dash = {version="^2.12.1", optional=true}
dash-cytoscape = {version="^0.3.0", optional=true}
nptyping = "^2.5.0"
pydantic = "^2.3.0"
sqlmodel = { git = "https://github.com/honglei/sqlmodel.git", branch = "main" }
[tool.poetry.extras]
dev = ["nwb_schema_language"]
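
sqlmodel is pulled from a fork here, presumably because upstream SQLModel did not yet support pydantic 2 (the lock entry above constrains it to pydantic ^2.1.1 and SQLAlchemy <=2.0.11, which also explains the SQLAlchemy downgrade). A minimal sketch, assuming the fork keeps upstream SQLModel's public API:

    from typing import Optional
    from sqlmodel import Field, SQLModel, create_engine

    class ScratchData(SQLModel, table=True):  # illustrative table, name borrowed from the schemas above
        id: Optional[int] = Field(default=None, primary_key=True)
        notes: str = ""

    engine = create_engine("sqlite://")   # in-memory SQLite
    SQLModel.metadata.create_all(engine)  # emits CREATE TABLE for declared models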


@@ -2,7 +2,7 @@ from argparse import ArgumentParser
from pathlib import Path
from linkml_runtime.dumpers import yaml_dumper
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from linkml.generators.sqlalchemygen import SQLAlchemyGenerator, TemplateEnum
from nwb_linkml import io
def generate_core_yaml(output_path:Path):
@@ -28,6 +28,18 @@ def generate_core_pydantic(yaml_path:Path, output_path:Path):
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)
def generate_core_sqlalchemy(yaml_path:Path, output_path:Path):
for schema in yaml_path.glob('*.yaml'):
python_name = schema.stem.replace('.', '_').replace('-', '_')
pydantic_file = (output_path / python_name).with_suffix('.py')
generator =SQLAlchemyGenerator(
str(schema)
)
gen_pydantic = generator.generate_sqla(template=TemplateEnum.DECLARATIVE)
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)
def parser() -> ArgumentParser:
parser = ArgumentParser('Generate NWB core schema')
parser.add_argument(
@@ -40,7 +52,13 @@ def parser() -> ArgumentParser:
'--pydantic',
help="directory to export pydantic models",
type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'models'
default=Path(__file__).parent.parent / 'nwb_linkml' / 'models' / 'pydantic'
)
parser.add_argument(
'--sqlalchemy',
help="directory to export sqlalchemy models",
type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'models' / 'sqlalchemy'
)
return parser
@@ -49,8 +67,10 @@ def main():
args = parser().parse_args()
args.yaml.mkdir(exist_ok=True)
args.pydantic.mkdir(exist_ok=True)
args.sqlalchemy.mkdir(exist_ok=True)
generate_core_yaml(args.yaml)
generate_core_pydantic(args.yaml, args.pydantic)
#generate_core_pydantic(args.yaml, args.pydantic)
generate_core_sqlalchemy(args.yaml, args.sqlalchemy)
if __name__ == "__main__":
main()
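
A standalone sketch of what generate_core_sqlalchemy does for a single schema, using the same SQLAlchemyGenerator calls as the loop above (the file paths are illustrative):

    from pathlib import Path
    from linkml.generators.sqlalchemygen import SQLAlchemyGenerator, TemplateEnum

    schema = Path("nwb_linkml/schema/core.nwb.file.yaml")        # assumed location
    out = Path("nwb_linkml/models/sqlalchemy/core_nwb_file.py")  # assumed location
    code = SQLAlchemyGenerator(str(schema)).generate_sqla(template=TemplateEnum.DECLARATIVE)
    out.write_text(code)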


@@ -7,6 +7,7 @@ from .fixtures import nwb_core_fixture, tmp_output_dir
from linkml_runtime.dumpers import yaml_dumper
from linkml.generators import PydanticGenerator
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.generators.sqlmodel import SQLModelGenerator
from nwb_linkml.lang_elements import NwbLangSchema
@@ -30,7 +31,7 @@ def test_generate_pydantic(tmp_output_dir):
# core_file = tmp_output_dir / 'core.yaml'
# pydantic_file = tmp_output_dir / 'core.py'
(tmp_output_dir / 'models').mkdir(exist_ok=True)
(tmp_output_dir / 'models' / 'pydantic').mkdir(exist_ok=True, parents=True)
for schema in (tmp_output_dir / 'schema').glob('*.yaml'):
if not schema.exists():
@@ -38,7 +39,7 @@ def test_generate_pydantic(tmp_output_dir):
# python friendly name
python_name = schema.stem.replace('.', '_').replace('-','_')
pydantic_file = (schema.parent.parent / 'models' / python_name).with_suffix('.py')
pydantic_file = (schema.parent.parent / 'models' / 'pydantic' / python_name).with_suffix('.py')
generator = NWBPydanticGenerator(
str(schema),
@@ -52,3 +53,30 @@ def test_generate_pydantic(tmp_output_dir):
with open(pydantic_file, 'w') as pfile:
pfile.write(gen_pydantic)
@pytest.mark.depends(on=['test_generate_core'])
def test_generate_sqlmodel(tmp_output_dir):
(tmp_output_dir / 'models' / 'sqlmodel').mkdir(exist_ok=True, parents=True)
for schema in (tmp_output_dir / 'schema').glob('*.yaml'):
if not schema.exists():
continue
# python friendly name
python_name = schema.stem.replace('.', '_').replace('-','_')
sqlmodel_file = (schema.parent.parent / 'models' / 'sqlmodel' / python_name).with_suffix('.py')
generator = SQLModelGenerator(
str(schema),
pydantic_version='2',
emit_metadata=True,
gen_classvars=True,
gen_slots=True
)
gen_sqlmodel = generator.generate_sqla()
with open(sqlmodel_file, 'w') as pfile:
pfile.write(gen_sqlmodel)
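
A hypothetical follow-up check, not in this commit: once the SQLModel modules are emitted, import one and create its tables against an in-memory database. The module name is a guess, and this assumes the generated classes are SQLModel tables registered on SQLModel.metadata:

    import importlib
    from sqlmodel import SQLModel, create_engine

    importlib.import_module("nwb_linkml.models.sqlmodel.hdmf_common_base")  # module name is a guess
    engine = create_engine("sqlite://")
    SQLModel.metadata.create_all(engine)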