regenerate models

This commit is contained in:
sneakers-the-rat 2024-08-05 20:56:08 -07:00
parent c06859a537
commit 51ceb14309
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
114 changed files with 10780 additions and 34 deletions

View file

@@ -123,7 +123,6 @@ class DynamicTableMixin(BaseModel):
                 # into {n_fields} rows, rather than keeping it in a dict
                 val = Series([val])
             data[k] = val
-        data = {k: self._columns[k][rows] for k in cols}
         return data
 
     def __setitem__(self, key: str, value: Any) -> None:

View file

@@ -5,7 +5,7 @@ from enum import Enum
 import re
 import sys
 import numpy as np
-from pandas import DataFrame
+from pandas import DataFrame, Series
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
 from numpydantic import NDArray, Shape
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
@@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel):
             rows, cols = item
             if isinstance(cols, (int, slice)):
                 cols = self.colnames[cols]
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
             data = self._slice_range(rows, cols)
             return DataFrame.from_dict(data)
         else:
@@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel):
             cols = self.colnames
         elif isinstance(cols, str):
             cols = [cols]
-        data = {k: self._columns[k][rows] for k in cols}
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
         return data
 
     def __setitem__(self, key: str, value: Any) -> None:
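The hunks above change DynamicTableMixin indexing in two ways: a (row, 'colname') pair now returns the bare cell value, and _slice_range wraps model-valued cells in a single-element Series before they reach DataFrame.from_dict. A minimal sketch of why the wrapping matters, using a made-up Cell model that is not part of this commit:

from pandas import DataFrame, Series
from pydantic import BaseModel


class Cell(BaseModel):
    # hypothetical stand-in for a model-valued column entry
    x: int
    y: int


val = Cell(x=1, y=2)

# Pydantic models are iterable over their fields, so handing one straight to
# pandas lets it be unpacked into one row per field; wrapping it in a
# single-element Series keeps it as one object-valued cell.
df = DataFrame.from_dict({"col": Series([val])})
print(df)  # one row, one column holding the Cell instance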

View file

@@ -5,7 +5,7 @@ from enum import Enum
 import re
 import sys
 import numpy as np
-from pandas import DataFrame
+from pandas import DataFrame, Series
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
 from numpydantic import NDArray, Shape
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
@@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel):
             rows, cols = item
             if isinstance(cols, (int, slice)):
                 cols = self.colnames[cols]
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
             data = self._slice_range(rows, cols)
             return DataFrame.from_dict(data)
         else:
@@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel):
             cols = self.colnames
         elif isinstance(cols, str):
             cols = [cols]
-        data = {k: self._columns[k][rows] for k in cols}
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
         return data
 
     def __setitem__(self, key: str, value: Any) -> None:

View file

@@ -5,7 +5,7 @@ from enum import Enum
 import re
 import sys
 import numpy as np
-from pandas import DataFrame
+from pandas import DataFrame, Series
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
 from numpydantic import NDArray, Shape
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
@@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel):
             rows, cols = item
             if isinstance(cols, (int, slice)):
                 cols = self.colnames[cols]
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
             data = self._slice_range(rows, cols)
             return DataFrame.from_dict(data)
         else:
@@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel):
             cols = self.colnames
         elif isinstance(cols, str):
             cols = [cols]
-        data = {k: self._columns[k][rows] for k in cols}
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
         return data
 
     def __setitem__(self, key: str, value: Any) -> None:

View file

@@ -0,0 +1,88 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
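As a quick orientation to these generated classes (this sketch is not part of the file): Data and Container only require a name, while hdf5_path and object_id come in as optional fields from ConfiguredBaseModel. The names and paths below are made up.

from uuid import uuid4

container = Container(name="acquisition", object_id=str(uuid4()))
data = Data(name="timestamps", hdf5_path="/acquisition/timestamps")
print(container.name, data.hdf5_path)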

View file

@@ -0,0 +1,133 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(ConfiguredBaseModel):
"""
a compressed sparse row matrix
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: NDArray[Shape["2 null"], int] = Field(
...,
description="""the shape of this sparse matrix""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
},
)
indices: CSRMatrixIndices = Field(..., description="""column indices""")
indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
data: CSRMatrixData = Field(..., description="""values in the matrix""")
class CSRMatrixIndices(ConfiguredBaseModel):
"""
column indices
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["indices"] = Field(
"indices",
json_schema_extra={
"linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"}
},
)
class CSRMatrixIndptr(ConfiguredBaseModel):
"""
index pointer
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["indptr"] = Field(
"indptr",
json_schema_extra={
"linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"}
},
)
class CSRMatrixData(ConfiguredBaseModel):
"""
values in the matrix
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixIndices.model_rebuild()
CSRMatrixIndptr.model_rebuild()
CSRMatrixData.model_rebuild()
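A short sketch (not from the generated file) of how these models fit together in the 1.2.0 schema, where the indices/indptr/data groups carry only their fixed names and shape is validated by numpydantic as a length-2 integer array; the matrix name is invented for illustration.

import numpy as np

matrix = CSRMatrix(
    name="connectivity",
    shape=np.array([3, 4]),
    indices=CSRMatrixIndices(),
    indptr=CSRMatrixIndptr(),
    data=CSRMatrixData(),
)
print(matrix.shape)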

View file

@@ -0,0 +1,450 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
version = "1.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
the model dict is ordered after python3.6, so we can use that minus
anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VocabData(VectorData):
"""
Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
vocabulary: NDArray[Shape["* null"], str] = Field(
...,
description="""The available items in the controlled vocabulary.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
VocabData.model_rebuild()
DynamicTable.model_rebuild()
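A hedged usage sketch (not part of the generated file) of the mixin behavior on the generated DynamicTable: plain list columns are accepted as extra fields, a VectorData column defers to its value when no VectorIndex is attached, and indexing follows the __getitem__ docstring above. Column names and values are invented.

import numpy as np

# a VectorData column without an index simply slices its value
letters = VectorData(name="letters", description="made-up column", value=np.array(["a", "b", "c"]))
print(letters[1])  # "b"

table = DynamicTable(
    name="trials",
    description="made-up example table",
    id=np.arange(3),
    colnames=["start", "stop"],
    start=[0.0, 1.0, 2.0],
    stop=[1.0, 2.0, 3.0],
)
print(table["start"])    # whole column
print(table[0, "stop"])  # single scalar cell -> 1.0
print(table[0:2])        # rows 0 and 1 as a pandas DataFrame
                         # (create_colnames also appends "id" as a column here)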

View file

@@ -0,0 +1,83 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_2_0.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_2_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
VocabData,
DynamicTable,
)
from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container
metamodel_version = "None"
version = "1.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.2.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()
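A brief sketch (not in the file) of the SimpleMultiContainer type added in 1.2.1, which simply holds a list of other Container instances; the names are made up.

holder = SimpleMultiContainer(
    name="grouped",
    value=[Container(name="a"), Container(name="b")],
)
print([c.name for c in holder.value])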

View file

@@ -0,0 +1,134 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_2_1.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.2.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
a compressed sparse row matrix
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: NDArray[Shape["2 null"], int] = Field(
...,
description="""the shape of this sparse matrix""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
},
)
indices: CSRMatrixIndices = Field(..., description="""column indices""")
indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
data: CSRMatrixData = Field(..., description="""values in the matrix""")
class CSRMatrixIndices(ConfiguredBaseModel):
"""
column indices
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["indices"] = Field(
"indices",
json_schema_extra={
"linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"}
},
)
class CSRMatrixIndptr(ConfiguredBaseModel):
"""
index pointer
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["indptr"] = Field(
"indptr",
json_schema_extra={
"linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"}
},
)
class CSRMatrixData(ConfiguredBaseModel):
"""
values in the matrix
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixIndices.model_rebuild()
CSRMatrixIndptr.model_rebuild()
CSRMatrixData.model_rebuild()

View file

@@ -0,0 +1,450 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
version = "1.2.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
the model dict is ordered after python3.6, so we can use that minus
anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VocabData(VectorData):
"""
Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
vocabulary: NDArray[Shape["* null"], str] = Field(
...,
description="""The available items in the controlled vocabulary.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
VocabData.model_rebuild()
DynamicTable.model_rebuild()

View file

@@ -0,0 +1,83 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_2_1.hdmf_common_sparse import (
CSRMatrix,
CSRMatrixIndices,
CSRMatrixIndptr,
CSRMatrixData,
)
from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_2_1.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
VocabData,
DynamicTable,
)
metamodel_version = "None"
version = "1.2.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()

View file

@@ -0,0 +1,177 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data
metamodel_version = "None"
version = "1.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.resources/",
"id": "hdmf-common.resources",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.resources",
}
)
class ExternalResources(Container):
"""
A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.resources", "tree_root": True}
)
name: str = Field(...)
keys: ExternalResourcesKeys = Field(
...,
description="""A table for storing user terms that are used to refer to external resources.""",
)
resources: ExternalResourcesResources = Field(
..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
)
objects: ExternalResourcesObjects = Field(
...,
description="""A table for identifying which objects in a file contain references to external resources.""",
)
object_keys: ExternalResourcesObjectKeys = Field(
..., description="""A table for identifying which objects use which keys."""
)
class ExternalResourcesKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
name: Literal["keys"] = Field(
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
key_name: str = Field(
...,
description="""The user term that maps to one or more resources in the 'resources' table.""",
)
class ExternalResourcesResources(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
name: Literal["resources"] = Field(
"resources",
json_schema_extra={
"linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
},
)
keytable_idx: int = Field(..., description="""The index to the key in the 'keys' table.""")
resource_name: str = Field(
...,
description="""The name of the online resource (e.g., website, database) that has the entity.""",
)
resource_id: str = Field(
..., description="""The unique identifier for the resource entity at the resource."""
)
uri: str = Field(
...,
description="""The URI for the resource entity this reference applies to. This can be an empty string.""",
)
class ExternalResourcesObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
name: Literal["objects"] = Field(
"objects",
json_schema_extra={
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
object_id: str = Field(..., description="""The UUID for the object.""")
field: str = Field(
...,
description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""",
)
class ExternalResourcesObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
name: Literal["object_keys"] = Field(
"object_keys",
json_schema_extra={
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
objecttable_idx: int = Field(
..., description="""The index to the 'objects' table for the object that holds the key."""
)
keytable_idx: int = Field(..., description="""The index to the 'keys' table for the key.""")
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ExternalResources.model_rebuild()
ExternalResourcesKeys.model_rebuild()
ExternalResourcesResources.model_rebuild()
ExternalResourcesObjects.model_rebuild()
ExternalResourcesObjectKeys.model_rebuild()
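# A minimal, illustrative sketch of how the four tables reference each other
# (not part of the generated schema; values and names are arbitrary, and the
# single-row construction only shows which index points where).
example_resources = ExternalResources(
    name="external_resources",
    keys=ExternalResourcesKeys(key_name="cortex"),
    resources=ExternalResourcesResources(
        keytable_idx=0,  # row 0 of the 'keys' table
        resource_name="example ontology",
        resource_id="TERM:0000001",
        uri="https://example.org/TERM_0000001",
    ),
    objects=ExternalResourcesObjects(
        object_id="00000000-0000-0000-0000-000000000000", field=""
    ),
    object_keys=ExternalResourcesObjectKeys(
        objecttable_idx=0,  # row 0 of the 'objects' table
        keytable_idx=0,     # row 0 of the 'keys' table
    ),
)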

View file

@@ -0,0 +1,110 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_3_0.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: List[int] = Field(
..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
},
)
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixData.model_rebuild()
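# Plain-numpy sketch of the CSR layout described in the CSRMatrix docstring:
# column indices for row i live in indices[indptr[i]:indptr[i+1]] and their
# values in data[indptr[i]:indptr[i+1]]. Illustrative only; it does not
# construct the pydantic model, and the names are arbitrary.
_dense = np.array([[0, 2, 0], [3, 0, 4]])
_indptr = np.concatenate([[0], np.cumsum((_dense != 0).sum(axis=1))])  # row index pointer: [0, 1, 3]
_indices = np.nonzero(_dense)[1]                                       # column indices: [1, 0, 2]
_data = _dense[_dense != 0]                                            # non-zero values: [2, 3, 4]
assert list(_indices[_indptr[1]:_indptr[2]]) == [0, 2]
assert list(_data[_indptr[1]:_indptr[2]]) == [3, 4]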

View file

@@ -0,0 +1,450 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
version = "1.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorIndex`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
The model dict is insertion-ordered (python>=3.7), so anything not in
:attr:`.NON_COLUMN_FIELDS` is taken as a column, in the order it was passed.
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VocabData(VectorData):
"""
Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
vocabulary: NDArray[Shape["* null"], str] = Field(
...,
description="""The available items in the controlled vocabulary.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
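# Plain-numpy sketch of the lookup described in the VocabData docstring: a stored
# value of i refers to the i-th entry of the 'vocabulary' attribute. Illustrative
# only; names are arbitrary and nothing here touches the generated class.
_vocab = np.array(["left", "right"])
_codes = np.array([0, 1, 1, 0])  # what VocabData stores
_decoded = _vocab[_codes]        # -> ["left", "right", "right", "left"]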
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
VocabData.model_rebuild()
DynamicTable.model_rebuild()
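# A minimal, hedged sketch of the indexing behavior documented on
# DynamicTableMixin.__getitem__, using the classes defined above. It assumes the
# generated models accept numpy arrays as shown; names and values are arbitrary.
_rate = VectorData(name="rate", description="an example column", value=np.array([1.0, 2.0, 3.0]))
_table = DynamicTable(
    name="units",
    description="an example table",
    colnames=["rate"],
    id=np.arange(3),
    rate=_rate,
)
_table["rate"]     # the whole column (a VectorData)
_table[0, "rate"]  # a single cell: 1.0
_table[0:2]        # the first two rows as a pandas DataFrame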

View file

@@ -0,0 +1,86 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_3_0.hdmf_common_resources import (
ExternalResources,
ExternalResourcesKeys,
ExternalResourcesResources,
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
)
from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_3_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_3_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
VocabData,
DynamicTable,
)
metamodel_version = "None"
version = "1.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.resources",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()
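# Minimal sketch: SimpleMultiContainer is just a named grouping of Containers.
# Illustrative only; the names are arbitrary.
_group = SimpleMultiContainer(
    name="containers",
    value=[Container(name="a"), Container(name="b")],
)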

View file

@@ -0,0 +1,110 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_4_0.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: List[int] = Field(
..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
},
)
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixData.model_rebuild()

View file

@@ -0,0 +1,423 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
metamodel_version = "None"
version = "1.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorIndex`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
The model dict is insertion-ordered (python>=3.7), so anything not in
:attr:`.NON_COLUMN_FIELDS` is taken as a column, in the order it was passed.
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
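# Plain-numpy sketch of the ragged encoding described in the VectorIndex docstring:
# the index stores cumulative end offsets into the target VectorData, so row i of
# the table spans value[index[i-1]:index[i]]. Illustrative only; it does not
# exercise the generated classes, and the names are arbitrary.
_flat = np.array([1, 2, 3, 4, 5])   # the target VectorData's values
_index = np.array([2, 5])           # the VectorIndex's values (end offsets)
_row0 = _flat[0:_index[0]]          # -> [1, 2]
_row1 = _flat[_index[0]:_index[1]]  # -> [3, 4, 5]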
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
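# Plain-numpy sketch of the row referencing described in the DynamicTableRegion
# docstring: the region's values are 0-indexed row numbers into the referenced
# table's columns. Illustrative only; names and values are arbitrary.
_other_column = np.array([10.0, 20.0, 30.0])  # a column of the referenced DynamicTable
_region = np.array([2, 0])                    # DynamicTableRegion values (row indices)
_selected = _other_column[_region]            # -> [30.0, 10.0]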
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
DynamicTable.model_rebuild()

View file

@@ -0,0 +1,77 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_4_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
)
metamodel_version = "None"
version = "1.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -6,7 +6,7 @@ import re
 import sys
 import numpy as np
 from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
-from pandas import DataFrame
+from pandas import DataFrame, Series
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
 from numpydantic import NDArray, Shape
@@ -198,6 +198,11 @@ class DynamicTableMixin(BaseModel):
             rows, cols = item
             if isinstance(cols, (int, slice)):
                 cols = self.colnames[cols]
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
             data = self._slice_range(rows, cols)
             return DataFrame.from_dict(data)
         else:
@@ -210,8 +215,14 @@ class DynamicTableMixin(BaseModel):
             cols = self.colnames
         elif isinstance(cols, str):
             cols = [cols]
-        data = {k: self._columns[k][rows] for k in cols}
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
         return data
     def __setitem__(self, key: str, value: Any) -> None:

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.5.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()

View file

@@ -0,0 +1,110 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_5_1.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.5.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: List[int] = Field(
..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
},
)
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixData.model_rebuild()

View file

@@ -0,0 +1,452 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.5.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorIndex`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
the model dict is ordered after python3.6, so we can use that minus
anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
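# Linking sketch (hypothetical column names): passing ``spike_times=VectorData(...)``
# together with ``spike_times_index=VectorIndex(...)`` pairs the two here, so that
# afterwards ``spike_times._index`` is the index and ``spike_times_index.target`` is
# the column; an index whose ``target`` already points at the column is linked the
# same way even if it does not follow the ``<name>_index`` naming convention.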
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
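# Worked example of the ragged encoding described in the VectorData/VectorIndex
# docstrings above (values are illustrative, not part of the schema):
#
#     letters = VectorData(name="letters", description="demo",
#                          value=np.array(["a", "b", "c", "d", "e"]))
#     letters_index = VectorIndex(name="letters_index", description="demo",
#                                 target=letters, value=np.array([3, 5]))
#
#     # row 0 spans letters.value[0:3] -> ["a", "b", "c"]
#     # row 1 spans letters.value[3:5] -> ["d", "e"]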
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
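# Sketch (hypothetical values): a DynamicTableRegion whose ``value`` is [0, 0, 2]
# points the referring table's three rows at rows 0, 0 and 2 of the linked
# ``table``, so shared metadata is stored once and referenced by row index
# rather than duplicated.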
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
DynamicTable.model_rebuild()
AlignedDynamicTable.model_rebuild()

View file

@@ -0,0 +1,78 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_5_1.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
metamodel_version = "None"
version = "1.5.1"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.6.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()

View file

@@ -0,0 +1,110 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_6_0.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.6.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: List[int] = Field(
..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
},
)
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
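# Worked example of the CSR layout described above (values are illustrative):
#
#     matrix  = [[10,  0,  0],
#                [ 0,  0, 20],
#                [30,  0, 40]]
#
#     shape   = [3, 3]
#     data    = [10, 20, 30, 40]   # non-zero values, row by row
#     indices = [0, 2, 0, 2]       # column of each non-zero value
#     indptr  = [0, 1, 2, 4]       # row i occupies data[indptr[i]:indptr[i+1]]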
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixData.model_rebuild()

View file

@@ -0,0 +1,452 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.6.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
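# e.g. (illustrative): with ``self.value == [3, 5]``, ``_getitem_helper(0)`` slices
# the target with slice(0, 3) and ``_getitem_helper(1)`` with slice(3, 5); each
# entry of the index is the end offset of that row's span in the target, and the
# previous entry (or 0 for the first row) is the start.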
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
the model dict is ordered after python3.6, so we can use that minus
anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
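# Construction sketch (column names and values are hypothetical; extra keyword
# arguments become columns via the mixin's ``create_colnames`` validator):
#
#     trials = DynamicTable(
#         name="trials", description="demo",
#         colnames=["start_time", "stop_time"],
#         id=np.arange(2),
#         start_time=VectorData(name="start_time", description="seconds",
#                               value=np.array([0.0, 1.0])),
#         stop_time=VectorData(name="stop_time", description="seconds",
#                              value=np.array([0.5, 1.5])),
#     )
#     trials["start_time"]     # -> the start_time VectorData column
#     trials[0, "stop_time"]   # -> 0.5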
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
DynamicTable.model_rebuild()
AlignedDynamicTable.model_rebuild()

View file

@@ -0,0 +1,78 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_6_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
metamodel_version = "None"
version = "1.6.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model

View file

@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
metamodel_version = "None"
version = "1.7.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.base/",
"id": "hdmf-common.base",
"imports": ["hdmf-common.nwb.language"],
"name": "hdmf-common.base",
}
)
class Data(ConfiguredBaseModel):
"""
An abstract data type for a dataset.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class Container(ConfiguredBaseModel):
"""
An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
name: str = Field(...)
class SimpleMultiContainer(Container):
"""
A simple Container for holding onto multiple containers.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.base", "tree_root": True}
)
value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Data.model_rebuild()
Container.model_rebuild()
SimpleMultiContainer.model_rebuild()

View file

@@ -0,0 +1,110 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_7_0.hdmf_common_base import Container
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.7.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.sparse/",
"id": "hdmf-common.sparse",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.sparse",
}
)
class CSRMatrix(Container):
"""
A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.sparse", "tree_root": True}
)
name: str = Field(...)
shape: List[int] = Field(
..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
},
)
data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
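# e.g. (illustrative): the 2x3 matrix [[0, 5, 0], [7, 0, 9]] is stored as
# shape=[2, 3], data=[5, 7, 9], indices=[1, 0, 2], indptr=[0, 1, 3];
# indptr has one entry per row plus one, so row i's values sit in
# data[indptr[i]:indptr[i+1]] at columns indices[indptr[i]:indptr[i+1]].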
class CSRMatrixData(ConfiguredBaseModel):
"""
The non-zero values in the matrix.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
name: Literal["data"] = Field(
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
CSRMatrix.model_rebuild()
CSRMatrixData.model_rebuild()

View file

@@ -0,0 +1,452 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
import numpy as np
from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container
from pandas import DataFrame, Series
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.7.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
class VectorDataMixin(BaseModel):
"""
Mixin class to give VectorData indexing abilities
"""
_index: Optional["VectorIndex"] = None
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
return self._index[item]
else:
return self.value[item]
def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
if self._index:
# Following hdmf, VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class VectorIndexMixin(BaseModel):
"""
Mixin class to give VectorIndex indexing abilities
"""
# redefined in `VectorData`, but included here for testing and type checking
value: Optional[NDArray] = None
target: Optional["VectorData"] = None
def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
"""
Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
"""
start = 0 if arg == 0 else self.value[arg - 1]
end = self.value[arg]
return self.target.array[slice(start, end)]
def __getitem__(self, item: Union[int, slice]) -> Any:
if self.target is None:
return self.value[item]
elif type(self.target).__name__ == "VectorData":
if isinstance(item, int):
return self._getitem_helper(item)
else:
idx = range(*item.indices(len(self.value)))
return [self._getitem_helper(i) for i in idx]
else:
raise NotImplementedError("DynamicTableRange not supported yet")
def __setitem__(self, key: Union[int, slice], value: Any) -> None:
if self._index:
# VectorIndex is the thing that knows how to do the slicing
self._index[key] = value
else:
self.value[key] = value
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
but simplifying along the way :)
"""
model_config = ConfigDict(extra="allow")
__pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
"name",
"colnames",
"description",
)
# overridden by subclass but implemented here for testing and typechecking purposes :)
colnames: List[str] = Field(default_factory=list)
@property
def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
@property
def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
return [getattr(self, k) for i, k in enumerate(self.colnames)]
@overload
def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
@overload
def __getitem__(self, item: int) -> DataFrame: ...
@overload
def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
@overload
def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
DataFrame,
list,
"NDArray",
"VectorDataMixin",
]: ...
@overload
def __getitem__(self, item: slice) -> DataFrame: ...
def __getitem__(
self,
item: Union[
str,
int,
slice,
Tuple[int, Union[int, str]],
Tuple[Union[int, slice], ...],
],
) -> Any:
"""
Get an item from the table
If item is...
- ``str`` : get the column with this name
- ``int`` : get the row at this index
- ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
- ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
gets the 0th row from ``colname``
- ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
returns as a :class:`pandas.DataFrame`
"""
if isinstance(item, str):
return self._columns[item]
if isinstance(item, (int, slice)):
return DataFrame.from_dict(self._slice_range(item))
elif isinstance(item, tuple):
if len(item) != 2:
raise ValueError(
"DynamicTables are 2-dimensional, can't index with more than 2 indices like"
f" {item}"
)
# all other cases are tuples of (rows, cols)
rows, cols = item
if isinstance(cols, (int, slice)):
cols = self.colnames[cols]
if isinstance(rows, int) and isinstance(cols, str):
# single scalar value
return self._columns[cols][rows]
data = self._slice_range(rows, cols)
return DataFrame.from_dict(data)
else:
raise ValueError(f"Unsure how to get item with key {item}")
def _slice_range(
self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
if cols is None:
cols = self.colnames
elif isinstance(cols, str):
cols = [cols]
data = {}
for k in cols:
val = self._columns[k][rows]
if isinstance(val, BaseModel):
# special case where pandas will unpack a pydantic model
# into {n_fields} rows, rather than keeping it in a dict
val = Series([val])
data[k] = val
return data
def __setitem__(self, key: str, value: Any) -> None:
raise NotImplementedError("TODO")
def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
"""
Add a column, appending it to ``colnames``
"""
# don't use this while building the model
if not getattr(self, "__pydantic_complete__", False):
return super().__setattr__(key, value)
if key not in self.model_fields_set and not key.endswith("_index"):
self.colnames.append(key)
return super().__setattr__(key, value)
@model_validator(mode="before")
@classmethod
def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
"""
Construct colnames from arguments.
the model dict is ordered after python3.6, so we can use that minus
anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order
"""
if "colnames" not in model:
colnames = [
k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
]
model["colnames"] = colnames
else:
# add any columns not explicitly given an order at the end
colnames = [
k
for k in model
if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index")
and k not in model["colnames"]
]
model["colnames"].extend(colnames)
return model
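# e.g. (hypothetical keyword arguments on a DynamicTableMixin subclass): passing
# ``a=..., b=...`` with no explicit ``colnames`` yields colnames == ["a", "b"]
# (keyword order), while passing ``colnames=["b"]`` along with both columns
# yields ["b", "a"]: explicitly ordered columns come first and the rest are
# appended in the order they were passed.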
@model_validator(mode="after")
def resolve_targets(self) -> "DynamicTableMixin":
"""
Ensure that any implicitly indexed columns are linked, and create backlinks
"""
for key, col in self._columns.items():
if isinstance(col, VectorData):
# find an index
idx = None
for field_name in self.model_fields_set:
# implicit name-based index
field = getattr(self, field_name)
if isinstance(field, VectorIndex) and (
field_name == f"{key}_index" or field.target is col
):
idx = field
break
if idx is not None:
col._index = idx
idx.target = col
return self
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common.table/",
"id": "hdmf-common.table",
"imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
"name": "hdmf-common.table",
}
)
class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
target: VectorData = Field(
..., description="""Reference to the target dataset that this index applies to."""
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class ElementIdentifiers(Data):
"""
A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
class DynamicTableRegion(VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
table: DynamicTable = Field(
..., description="""Reference to the DynamicTable object that this region applies to."""
)
description: str = Field(
..., description="""Description of what this table region points to."""
)
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
class AlignedDynamicTable(DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-common.table", "tree_root": True}
)
value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
colnames: List[str] = Field(
...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: NDArray[Shape["* num_rows"], int] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
)
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns, including index columns, of this dynamic table."""
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
VectorData.model_rebuild()
VectorIndex.model_rebuild()
ElementIdentifiers.model_rebuild()
DynamicTableRegion.model_rebuild()
DynamicTable.model_rebuild()
AlignedDynamicTable.model_rebuild()

View file

@@ -0,0 +1,78 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_7_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
metamodel_version = "None"
version = "1.7.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-common"},
},
"default_prefix": "hdmf-common/",
"description": "Common data structures provided by HDMF",
"id": "hdmf-common",
"imports": [
"hdmf-common.base",
"hdmf-common.table",
"hdmf-common.sparse",
"hdmf-common.nwb.language",
],
"name": "hdmf-common",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model


@@ -1,9 +1,14 @@
 from __future__ import annotations
-from ...hdmf_common.v1_8_0.hdmf_common_base import Data
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+import numpy as np
+from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container
 from pandas import DataFrame, Series
-from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple
-from pydantic import BaseModel, ConfigDict, Field, RootModel, model_validator
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
 from numpydantic import NDArray, Shape
 metamodel_version = "None"


@@ -7,7 +7,7 @@ import sys
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
 import numpy as np
-from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData
+from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData
 from numpydantic import NDArray, Shape
 metamodel_version = "None"
@@ -55,7 +55,7 @@ linkml_meta = LinkMLMeta(
 },
 "default_prefix": "hdmf-experimental.experimental/",
 "id": "hdmf-experimental.experimental",
-"imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"],
+"imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"],
 "name": "hdmf-experimental.experimental",
 }
 )


@@ -7,7 +7,7 @@ import sys
 from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
 from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
 import numpy as np
-from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
+from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data
 metamodel_version = "None"
 version = "0.1.0"
@@ -53,7 +53,7 @@ linkml_meta = LinkMLMeta(
 },
 "default_prefix": "hdmf-experimental.resources/",
 "id": "hdmf-experimental.resources",
-"imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"],
+"imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"],
 "name": "hdmf-experimental.resources",
 }
 )


@@ -15,15 +15,14 @@ from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import (
 ExternalResourcesObjects,
 ExternalResourcesObjectKeys,
 )
-from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
-from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer
-from ...hdmf_common.v1_5_0.hdmf_common_table import (
+from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_4_0.hdmf_common_table import (
 VectorData,
 VectorIndex,
 ElementIdentifiers,
 DynamicTableRegion,
 DynamicTable,
-AlignedDynamicTable,
 )
 from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData


@@ -0,0 +1,91 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "0.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.experimental/",
"id": "hdmf-experimental.experimental",
"imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.experimental",
}
)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.experimental", "tree_root": True}
)
name: str = Field(...)
elements: VectorData = Field(
...,
description="""Reference to the VectorData object that contains the enumerable elements""",
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
EnumData.model_rebuild()
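# Editor-added illustrative sketch (not part of the generated module): EnumData
# stores integer codes, and a code of i means the i-th entry of the VectorData
# given in `elements`, as described in the docstring above. Values are hypothetical.
import numpy as np

labels = VectorData(
    name="cell_type_elements",
    description="the fixed set of values",
    value=np.array(["pyramidal", "interneuron"]),
)
cell_type = EnumData(
    name="cell_type",
    description="per-row category, stored as indices into `elements`",
    elements=labels,
    value=np.array([0, 1, 0, 0]),
)
decoded = labels.value[cell_type.value]  # -> ['pyramidal', 'interneuron', 'pyramidal', 'pyramidal']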


@@ -0,0 +1,199 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data
metamodel_version = "None"
version = "0.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.resources/",
"id": "hdmf-experimental.resources",
"imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.resources",
}
)
class ExternalResources(Container):
"""
A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.resources", "tree_root": True}
)
name: str = Field(...)
keys: ExternalResourcesKeys = Field(
...,
description="""A table for storing user terms that are used to refer to external resources.""",
)
entities: ExternalResourcesEntities = Field(
..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
)
resources: ExternalResourcesResources = Field(
..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
)
objects: ExternalResourcesObjects = Field(
...,
description="""A table for identifying which objects in a file contain references to external resources.""",
)
object_keys: ExternalResourcesObjectKeys = Field(
..., description="""A table for identifying which objects use which keys."""
)
class ExternalResourcesKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["keys"] = Field(
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
key: str = Field(
...,
description="""The user term that maps to one or more resources in the 'resources' table.""",
)
class ExternalResourcesEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["entities"] = Field(
"entities",
json_schema_extra={
"linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
},
)
keys_idx: int = Field(..., description="""The index to the key in the 'keys' table.""")
resources_idx: int = Field(..., description="""The index into the 'resources' table""")
entity_id: str = Field(..., description="""The unique identifier of the entity.""")
entity_uri: str = Field(
...,
description="""The URI for the entity this reference applies to. This can be an empty string.""",
)
class ExternalResourcesResources(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["resources"] = Field(
"resources",
json_schema_extra={
"linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
},
)
resource: str = Field(..., description="""The name of the resource.""")
resource_uri: str = Field(
..., description="""The URI for the resource. This can be an empty string."""
)
class ExternalResourcesObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["objects"] = Field(
"objects",
json_schema_extra={
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
object_id: str = Field(..., description="""The UUID for the object.""")
relative_path: str = Field(
...,
description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""",
)
field: str = Field(
...,
description="""The field of the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
)
class ExternalResourcesObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["object_keys"] = Field(
"object_keys",
json_schema_extra={
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
objects_idx: int = Field(
..., description="""The index to the 'objects' table for the object that holds the key."""
)
keys_idx: int = Field(..., description="""The index to the 'keys' table for the key.""")
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ExternalResources.model_rebuild()
ExternalResourcesKeys.model_rebuild()
ExternalResourcesEntities.model_rebuild()
ExternalResourcesResources.model_rebuild()
ExternalResourcesObjects.model_rebuild()
ExternalResourcesObjectKeys.model_rebuild()
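# Editor-added illustrative sketch (not part of the generated module): how the
# tables defined above reference each other by row index. All values are
# hypothetical; "human"/NCBI Taxonomy follow the examples used in the field
# descriptions of later schema versions.
keys = ExternalResourcesKeys(key="human")
resources = ExternalResourcesResources(
    resource="NCBI Taxonomy",
    resource_uri="https://www.ncbi.nlm.nih.gov/taxonomy",
)
entities = ExternalResourcesEntities(
    keys_idx=0,  # row 0 of the 'keys' table
    resources_idx=0,  # row 0 of the 'resources' table
    entity_id="NCBI_TAXON:9606",
    entity_uri="https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606",
)
objects = ExternalResourcesObjects(object_id="hypothetical-uuid", relative_path="", field="")
object_keys = ExternalResourcesObjectKeys(objects_idx=0, keys_idx=0)
er = ExternalResources(
    name="external_resources",
    keys=keys,
    entities=entities,
    resources=resources,
    objects=objects,
    object_keys=object_keys,
)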


@@ -0,0 +1,89 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_experimental.v0_2_0.hdmf_experimental_resources import (
ExternalResources,
ExternalResourcesKeys,
ExternalResourcesEntities,
ExternalResourcesResources,
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
)
from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_5_1.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
from ...hdmf_experimental.v0_2_0.hdmf_experimental_experimental import EnumData
metamodel_version = "None"
version = "0.2.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental/",
"description": (
"Experimental data structures provided by HDMF. These are not "
"guaranteed to be available in the future."
),
"id": "hdmf-experimental",
"imports": [
"hdmf-experimental.experimental",
"hdmf-experimental.resources",
"hdmf-experimental.nwb.language",
],
"name": "hdmf-experimental",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model


@@ -0,0 +1,91 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "0.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.experimental/",
"id": "hdmf-experimental.experimental",
"imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.experimental",
}
)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.experimental", "tree_root": True}
)
name: str = Field(...)
elements: VectorData = Field(
...,
description="""Reference to the VectorData object that contains the enumerable elements""",
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
EnumData.model_rebuild()


@@ -0,0 +1,203 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data
metamodel_version = "None"
version = "0.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.resources/",
"id": "hdmf-experimental.resources",
"imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.resources",
}
)
class ExternalResources(Container):
"""
A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.resources", "tree_root": True}
)
name: str = Field(...)
keys: ExternalResourcesKeys = Field(
...,
description="""A table for storing user terms that are used to refer to external resources.""",
)
files: ExternalResourcesFiles = Field(
..., description="""A table for storing object ids of files used in external resources."""
)
entities: ExternalResourcesEntities = Field(
..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
)
objects: ExternalResourcesObjects = Field(
...,
description="""A table for identifying which objects in a file contain references to external resources.""",
)
object_keys: ExternalResourcesObjectKeys = Field(
..., description="""A table for identifying which objects use which keys."""
)
class ExternalResourcesKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["keys"] = Field(
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
key: str = Field(
...,
description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
)
class ExternalResourcesFiles(Data):
"""
A table for storing object ids of files used in external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["files"] = Field(
"files",
json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
)
file_object_id: str = Field(
...,
description="""The object id (UUID) of a file that contains objects that refers to external resources.""",
)
class ExternalResourcesEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["entities"] = Field(
"entities",
json_schema_extra={
"linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
},
)
keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
entity_id: str = Field(
...,
description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
)
entity_uri: str = Field(
...,
description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
)
class ExternalResourcesObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["objects"] = Field(
"objects",
json_schema_extra={
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
files_idx: int = Field(
..., description="""The row index to the file in the `files` table containing the object."""
)
object_id: str = Field(..., description="""The object id (UUID) of the object.""")
object_type: str = Field(..., description="""The data type of the object.""")
relative_path: str = Field(
...,
description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""",
)
field: str = Field(
...,
description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
)
class ExternalResourcesObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["object_keys"] = Field(
"object_keys",
json_schema_extra={
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
objects_idx: int = Field(
..., description="""The row index to the object in the `objects` table that holds the key"""
)
keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ExternalResources.model_rebuild()
ExternalResourcesKeys.model_rebuild()
ExternalResourcesFiles.model_rebuild()
ExternalResourcesEntities.model_rebuild()
ExternalResourcesObjects.model_rebuild()
ExternalResourcesObjectKeys.model_rebuild()


@@ -0,0 +1,89 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_experimental.v0_3_0.hdmf_experimental_resources import (
ExternalResources,
ExternalResourcesKeys,
ExternalResourcesFiles,
ExternalResourcesEntities,
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
)
from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_6_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
from ...hdmf_experimental.v0_3_0.hdmf_experimental_experimental import EnumData
metamodel_version = "None"
version = "0.3.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental/",
"description": (
"Experimental data structures provided by HDMF. These are not "
"guaranteed to be available in the future."
),
"id": "hdmf-experimental",
"imports": [
"hdmf-experimental.experimental",
"hdmf-experimental.resources",
"hdmf-experimental.nwb.language",
],
"name": "hdmf-experimental",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model


@@ -0,0 +1,91 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData
from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "0.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.experimental/",
"id": "hdmf-experimental.experimental",
"imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.experimental",
}
)
class EnumData(VectorData):
"""
Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.experimental", "tree_root": True}
)
name: str = Field(...)
elements: VectorData = Field(
...,
description="""Reference to the VectorData object that contains the enumerable elements""",
)
description: str = Field(..., description="""Description of what these vectors represent.""")
value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
NDArray[Shape["* dim0, * dim1, * dim2"], Any],
NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
]
] = Field(None)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
EnumData.model_rebuild()


@@ -0,0 +1,225 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data
metamodel_version = "None"
version = "0.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": False},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental.resources/",
"id": "hdmf-experimental.resources",
"imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.resources",
}
)
class ExternalResources(Container):
"""
A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
{"from_schema": "hdmf-experimental.resources", "tree_root": True}
)
name: str = Field(...)
keys: ExternalResourcesKeys = Field(
...,
description="""A table for storing user terms that are used to refer to external resources.""",
)
files: ExternalResourcesFiles = Field(
..., description="""A table for storing object ids of files used in external resources."""
)
entities: ExternalResourcesEntities = Field(
..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
)
objects: ExternalResourcesObjects = Field(
...,
description="""A table for identifying which objects in a file contain references to external resources.""",
)
object_keys: ExternalResourcesObjectKeys = Field(
..., description="""A table for identifying which objects use which keys."""
)
entity_keys: ExternalResourcesEntityKeys = Field(
..., description="""A table for identifying which keys use which entity."""
)
class ExternalResourcesKeys(Data):
"""
A table for storing user terms that are used to refer to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["keys"] = Field(
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
key: str = Field(
...,
description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
)
class ExternalResourcesFiles(Data):
"""
A table for storing object ids of files used in external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["files"] = Field(
"files",
json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
)
file_object_id: str = Field(
...,
description="""The object id (UUID) of a file that contains objects that refers to external resources.""",
)
class ExternalResourcesEntities(Data):
"""
A table for mapping user terms (i.e., keys) to resource entities.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["entities"] = Field(
"entities",
json_schema_extra={
"linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
},
)
entity_id: str = Field(
...,
description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
)
entity_uri: str = Field(
...,
description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
)
class ExternalResourcesObjects(Data):
"""
A table for identifying which objects in a file contain references to external resources.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["objects"] = Field(
"objects",
json_schema_extra={
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
files_idx: int = Field(
..., description="""The row index to the file in the `files` table containing the object."""
)
object_id: str = Field(..., description="""The object id (UUID) of the object.""")
object_type: str = Field(..., description="""The data type of the object.""")
relative_path: str = Field(
...,
description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""",
)
field: str = Field(
...,
description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
)
class ExternalResourcesObjectKeys(Data):
"""
A table for identifying which objects use which keys.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["object_keys"] = Field(
"object_keys",
json_schema_extra={
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
objects_idx: int = Field(
..., description="""The row index to the object in the `objects` table that holds the key"""
)
keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
class ExternalResourcesEntityKeys(Data):
"""
A table for identifying which keys use which entity.
"""
linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
name: Literal["entity_keys"] = Field(
"entity_keys",
json_schema_extra={
"linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"}
},
)
entities_idx: int = Field(
..., description="""The row index to the entity in the `entities` table."""
)
keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ExternalResources.model_rebuild()
ExternalResourcesKeys.model_rebuild()
ExternalResourcesFiles.model_rebuild()
ExternalResourcesEntities.model_rebuild()
ExternalResourcesObjects.model_rebuild()
ExternalResourcesObjectKeys.model_rebuild()
ExternalResourcesEntityKeys.model_rebuild()


@@ -0,0 +1,90 @@
from __future__ import annotations
from datetime import datetime, date
from decimal import Decimal
from enum import Enum
import re
import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_experimental.v0_4_0.hdmf_experimental_resources import (
ExternalResources,
ExternalResourcesKeys,
ExternalResourcesFiles,
ExternalResourcesEntities,
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
ExternalResourcesEntityKeys,
)
from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer
from ...hdmf_common.v1_7_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
AlignedDynamicTable,
)
from ...hdmf_experimental.v0_4_0.hdmf_experimental_experimental import EnumData
metamodel_version = "None"
version = "0.4.0"
class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict(
validate_assignment=True,
validate_default=True,
extra="forbid",
arbitrary_types_allowed=True,
use_enum_values=True,
strict=False,
)
hdf5_path: Optional[str] = Field(
None, description="The absolute path that this object is stored in an NWB file"
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
model_config = ConfigDict(frozen=True)
def __getattr__(self, key: str):
return getattr(self.root, key)
def __getitem__(self, key: str):
return self.root[key]
def __setitem__(self, key: str, value):
self.root[key] = value
def __contains__(self, key: str) -> bool:
return key in self.root
linkml_meta = LinkMLMeta(
{
"annotations": {
"is_namespace": {"tag": "is_namespace", "value": True},
"namespace": {"tag": "namespace", "value": "hdmf-experimental"},
},
"default_prefix": "hdmf-experimental/",
"description": (
"Experimental data structures provided by HDMF. These are not "
"guaranteed to be available in the future."
),
"id": "hdmf-experimental",
"imports": [
"hdmf-experimental.experimental",
"hdmf-experimental.resources",
"hdmf-experimental.nwb.language",
],
"name": "hdmf-experimental",
}
)
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model


@@ -5,7 +5,7 @@ annotations:
 value: 'False'
 namespace:
 tag: namespace
-value: core
+value: hdmf-common
 description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
 id: nwb.language
 imports:


@@ -5,7 +5,7 @@ annotations:
 value: 'False'
 namespace:
 tag: namespace
-value: core
+value: hdmf-common
 description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
 id: nwb.language
 imports:


@@ -5,7 +5,7 @@ annotations:
 value: 'False'
 namespace:
 tag: namespace
-value: core
+value: hdmf-common
 description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
 id: nwb.language
 imports:


@@ -0,0 +1,33 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.2.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true


@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any


@@ -0,0 +1,81 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.2.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: a compressed sparse row matrix
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: the shape of this sparse matrix
array:
dimensions:
- alias: 'null'
exact_cardinality: 2
range: int
required: true
multivalued: false
indices:
name: indices
description: column indices
range: CSRMatrix__indices
required: true
multivalued: false
indptr:
name: indptr
description: index pointer
range: CSRMatrix__indptr
required: true
multivalued: false
data:
name: data
description: values in the matrix
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__indices:
name: CSRMatrix__indices
description: column indices
attributes:
name:
name: name
ifabsent: string(indices)
range: string
required: true
equals_string: indices
CSRMatrix__indptr:
name: CSRMatrix__indptr
description: index pointer
attributes:
name:
name: name
ifabsent: string(indptr)
range: string
required: true
equals_string: indptr
CSRMatrix__data:
name: CSRMatrix__data
description: values in the matrix
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data
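# Editor-added illustrative Python sketch (not part of this schema): the shape /
# indices / indptr / data groups modelled above follow the standard CSR layout,
# e.g. as produced by scipy.sparse.
import numpy as np
from scipy.sparse import csr_matrix

m = csr_matrix(np.array([[0, 1, 0],
                         [2, 0, 3]]))
print(m.shape)    # (2, 3)   -> shape
print(m.indices)  # [1 0 2]  -> column indices of the stored values
print(m.indptr)   # [0 1 3]  -> row i's values are data[indptr[i]:indptr[i+1]]
print(m.data)     # [1 2 3]  -> values in the matrix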


@@ -0,0 +1,193 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.2.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
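# Editor-added illustrative Python sketch (not part of this schema): how a
# VectorIndex delimits ragged rows within a flat VectorData, per the two
# descriptions above. The values are hypothetical.
vector_data = ["a", "b", "c", "d", "e", "f"]  # flat storage of every cell
vector_index = [2, 3, 6]  # end offset of each row

def row(i):
    start = 0 if i == 0 else vector_index[i - 1]
    return vector_data[start:vector_index[i]]

print(row(0))  # ['a', 'b']        == VectorData[0:VectorIndex[0]]
print(row(1))  # ['c']             == VectorData[VectorIndex[0]:VectorIndex[1]]
print(row(2))  # ['d', 'e', 'f']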
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
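# Editor-added illustrative Python sketch (not part of this schema): a
# DynamicTableRegion is a column of 0-indexed row numbers pointing into another
# table, so shared metadata is stored once and referenced per row. Values are
# hypothetical.
electrodes = ["shank0/ch0", "shank0/ch1", "shank1/ch0"]  # stands in for the target DynamicTable
region = [0, 0, 2, 1]  # one entry per row of the referring table

resolved = [electrodes[i] for i in region]
print(resolved)  # ['shank0/ch0', 'shank0/ch0', 'shank1/ch0', 'shank0/ch1']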
VocabData:
name: VocabData
description: Data that come from a controlled vocabulary of text values. A data
value of i corresponds to the i-th element in the 'vocabulary' array attribute.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
vocabulary:
name: vocabulary
description: The available items in the controlled vocabulary.
array:
dimensions:
- alias: 'null'
range: text
required: true
multivalued: false
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
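# Editor-added illustrative Python sketch (not part of this schema): the
# struct-of-arrays layout described above versus an array-of-structs (compound
# type) layout. Values are hypothetical.
# DynamicTable-style struct-of-arrays: one array per column, aligned on row index.
table = {
    "id": [0, 1, 2],
    "start_time": [0.0, 1.5, 3.2],
    "correct": [True, False, True],
}
# Compound-type-style array-of-structs: one record per row.
rows = [
    {"id": 0, "start_time": 0.0, "correct": True},
    {"id": 1, "start_time": 1.5, "correct": False},
    {"id": 2, "start_time": 3.2, "correct": True},
]
print(table["start_time"])              # column-centric access: one contiguous read
print([r["start_time"] for r in rows])  # the same column, gathered row by row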


@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.2.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/


@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.2.1
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true


@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any


@@ -0,0 +1,83 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.2.1
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: a compressed sparse row matrix
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: the shape of this sparse matrix
array:
dimensions:
- alias: 'null'
exact_cardinality: 2
range: int
required: true
multivalued: false
indices:
name: indices
description: column indices
range: CSRMatrix__indices
required: true
multivalued: false
indptr:
name: indptr
description: index pointer
range: CSRMatrix__indptr
required: true
multivalued: false
data:
name: data
description: values in the matrix
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__indices:
name: CSRMatrix__indices
description: column indices
attributes:
name:
name: name
ifabsent: string(indices)
range: string
required: true
equals_string: indices
CSRMatrix__indptr:
name: CSRMatrix__indptr
description: index pointer
attributes:
name:
name: name
ifabsent: string(indptr)
range: string
required: true
equals_string: indptr
CSRMatrix__data:
name: CSRMatrix__data
description: values in the matrix
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data


@@ -0,0 +1,193 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.2.1
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
VocabData:
name: VocabData
description: Data that come from a controlled vocabulary of text values. A data
value of i corresponds to the i-th element in the 'vocabulary' array attribute.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
vocabulary:
name: vocabulary
description: The available items in the controlled vocabulary.
array:
dimensions:
- alias: 'null'
range: text
required: true
multivalued: false
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
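
The VectorData/VectorIndex pairing defined above encodes ragged columns by storing every element in one flat array and keeping, per row, the end offset of that row's slice. A minimal sketch of the lookup described in the VectorData docstring, using plain numpy arrays rather than the generated classes (all names below are illustrative):

import numpy as np

# Flat storage for a ragged column: three rows of lengths 2, 1, and 3.
vector_data = np.array([10, 11, 20, 30, 31, 32])
# VectorIndex holds the (exclusive) end offset of each row's slice into vector_data.
vector_index = np.array([2, 3, 6])

def ragged_row(data, index, row):
    """Row `row` of the ragged column: data[index[row - 1]:index[row]], with row 0 starting at 0."""
    start = 0 if row == 0 else index[row - 1]
    return data[start:index[row]]

assert ragged_row(vector_data, vector_index, 0).tolist() == [10, 11]
assert ragged_row(vector_data, vector_index, 2).tolist() == [30, 31, 32]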

View file

@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.2.1
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/

View file

@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.3.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true
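
For orientation, the three base classes above become very small pydantic models once generated; a hand-written approximation (not the actual generated output) looks roughly like this:

from typing import List, Optional
from pydantic import BaseModel

class Data(BaseModel):
    """An abstract data type for a dataset."""
    name: str

class Container(BaseModel):
    """An abstract data type for a group storing collections of data and metadata."""
    name: str

class SimpleMultiContainer(Container):
    """A simple Container holding multiple child containers."""
    value: Optional[List[Container]] = None

holder = SimpleMultiContainer(name="holder", value=[Container(name="a"), Container(name="b")])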

View file

@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any
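
These scalar type stubs exist so nwb-schema-language dtypes resolve to LinkML types; when arrays are actually materialized they typically map onto numpy dtypes. A purely illustrative lookup (this mapping is an assumption for the example, not part of the schema):

import numpy as np

# Hypothetical mapping from nwb.language type names to numpy dtypes.
NWB_DTYPE_MAP = {
    "float32": np.float32, "float64": np.float64, "numeric": np.float64,
    "int8": np.int8, "int16": np.int16, "int32": np.int32, "int64": np.int64,
    "uint8": np.uint8, "uint16": np.uint16, "uint32": np.uint32, "uint64": np.uint64,
    "bool": np.bool_, "text": np.str_, "isodatetime": np.datetime64,
}

assert np.dtype(NWB_DTYPE_MAP["uint32"]).itemsize == 4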

View file

@@ -0,0 +1,158 @@
name: hdmf-common.resources
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.resources
version: 1.3.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.resources/
classes:
ExternalResources:
name: ExternalResources
description: 'A set of four tables for tracking external resource references in
a file. NOTE: this data type is in beta testing and is subject to change in
a later version.'
is_a: Container
attributes:
name:
name: name
range: string
required: true
keys:
name: keys
description: A table for storing user terms that are used to refer to external
resources.
range: ExternalResources__keys
required: true
multivalued: false
resources:
name: resources
description: A table for mapping user terms (i.e., keys) to resource entities.
range: ExternalResources__resources
required: true
multivalued: false
objects:
name: objects
description: A table for identifying which objects in a file contain references
to external resources.
range: ExternalResources__objects
required: true
multivalued: false
object_keys:
name: object_keys
description: A table for identifying which objects use which keys.
range: ExternalResources__object_keys
required: true
multivalued: false
tree_root: true
ExternalResources__keys:
name: ExternalResources__keys
description: A table for storing user terms that are used to refer to external
resources.
is_a: Data
attributes:
name:
name: name
ifabsent: string(keys)
range: string
required: true
equals_string: keys
key_name:
name: key_name
description: The user term that maps to one or more resources in the 'resources'
table.
range: text
required: true
multivalued: false
ExternalResources__resources:
name: ExternalResources__resources
description: A table for mapping user terms (i.e., keys) to resource entities.
is_a: Data
attributes:
name:
name: name
ifabsent: string(resources)
range: string
required: true
equals_string: resources
keytable_idx:
name: keytable_idx
description: The index to the key in the 'keys' table.
range: uint
required: true
multivalued: false
resource_name:
name: resource_name
description: The name of the online resource (e.g., website, database) that
has the entity.
range: text
required: true
multivalued: false
resource_id:
name: resource_id
description: The unique identifier for the resource entity at the resource.
range: text
required: true
multivalued: false
uri:
name: uri
description: The URI for the resource entity this reference applies to. This
can be an empty string.
range: text
required: true
multivalued: false
ExternalResources__objects:
name: ExternalResources__objects
description: A table for identifying which objects in a file contain references
to external resources.
is_a: Data
attributes:
name:
name: name
ifabsent: string(objects)
range: string
required: true
equals_string: objects
object_id:
name: object_id
description: The UUID for the object.
range: text
required: true
multivalued: false
field:
name: field
description: The field of the object. This can be an empty string if the object
is a dataset and the field is the dataset values.
range: text
required: true
multivalued: false
ExternalResources__object_keys:
name: ExternalResources__object_keys
description: A table for identifying which objects use which keys.
is_a: Data
attributes:
name:
name: name
ifabsent: string(object_keys)
range: string
required: true
equals_string: object_keys
objecttable_idx:
name: objecttable_idx
description: The index to the 'objects' table for the object that holds the
key.
range: uint
required: true
multivalued: false
keytable_idx:
name: keytable_idx
description: The index to the 'keys' table for the key.
range: uint
required: true
multivalued: false
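
The four ExternalResources tables are joined purely by the integer index columns declared above: object_keys links an object row to a key row, and resources links a key row to an external entity. A small sketch of that resolution with toy rows (the data and helper function are illustrative only):

# Toy rows for the four tables described above.
keys = ["cortex"]                                        # keys.key_name values
resources = [
    {"keytable_idx": 0, "resource_name": "UBERON",
     "resource_id": "UBERON:0000956", "uri": "http://purl.obolibrary.org/obo/UBERON_0000956"},
]
objects = [{"object_id": "uuid-1234", "field": "location"}]
object_keys = [{"objecttable_idx": 0, "keytable_idx": 0}]

def uris_for_object(obj_row):
    """Follow object -> key -> resource links and return the matching URIs."""
    key_rows = [ok["keytable_idx"] for ok in object_keys if ok["objecttable_idx"] == obj_row]
    return [r["uri"] for r in resources if r["keytable_idx"] in key_rows]

assert uris_for_object(0) == ["http://purl.obolibrary.org/obo/UBERON_0000956"]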

View file

@@ -0,0 +1,68 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.3.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
required: true
multivalued: true
indices:
name: indices
description: The column indices.
array:
dimensions:
- alias: number_of_non_zero_values
range: uint
required: true
multivalued: false
indptr:
name: indptr
description: The row index pointer.
array:
dimensions:
- alias: number_of_rows_in_the_matrix_1
range: uint
required: true
multivalued: false
data:
name: data
description: The non-zero values in the matrix.
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data
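
The shape/indices/indptr/data attributes above are the standard scipy CSR triple, so a stored matrix can be rehydrated directly; a sketch with made-up values:

import numpy as np
from scipy.sparse import csr_matrix

# Illustrative CSR pieces matching the attribute names above.
shape = (3, 4)
data = np.array([5.0, 8.0, 3.0, 6.0])
indices = np.array([0, 1, 2, 1])   # column index of each stored value
indptr = np.array([0, 1, 3, 4])    # row i owns data[indptr[i]:indptr[i+1]]

m = csr_matrix((data, indices, indptr), shape=shape)
# Row 1 stores its values at columns indices[indptr[1]:indptr[2]] == [1, 2].
assert m[1, 1] == 8.0 and m[1, 2] == 3.0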

View file

@@ -0,0 +1,193 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.3.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
VocabData:
name: VocabData
description: Data that come from a controlled vocabulary of text values. A data
value of i corresponds to the i-th element in the 'vocabulary' array attribute.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
vocabulary:
name: vocabulary
description: The available items in the controlled vocabulary.
array:
dimensions:
- alias: 'null'
range: text
required: true
multivalued: false
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
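
A DynamicTableRegion column, as described above, is just an array of 0-indexed row numbers pointing into another table, so resolving it is a plain positional lookup. A toy sketch with pandas (table contents are invented for the example):

import pandas as pd

# A target table and a region column of 0-indexed row references into it.
electrodes = pd.DataFrame({"x": [0.0, 0.5, 1.0], "group": ["a", "a", "b"]})
region = [0, 2]   # rows of `electrodes` this region points to

referenced = electrodes.iloc[region]
assert list(referenced["group"]) == ["a", "b"]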

View file

@@ -0,0 +1,18 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.3.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.resources
- hdmf-common.nwb.language
default_prefix: hdmf-common/
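
Each of these generated files is an ordinary LinkML schema, so it can be inspected with linkml-runtime's SchemaView; a sketch, assuming the namespace YAML above is saved as namespace.yaml next to the files it imports:

from linkml_runtime import SchemaView

# Imports are resolved relative to the schema file.
sv = SchemaView("namespace.yaml")
print(sorted(sv.all_classes()))                 # CSRMatrix, DynamicTable, VectorData, ...
print(sv.get_class("DynamicTable").description)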

View file

@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.4.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true

View file

@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any

View file

@@ -0,0 +1,68 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.4.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
required: true
multivalued: true
indices:
name: indices
description: The column indices.
array:
dimensions:
- alias: number_of_non_zero_values
range: uint
required: true
multivalued: false
indptr:
name: indptr
description: The row index pointer.
array:
dimensions:
- alias: number_of_rows_in_the_matrix_1
range: uint
required: true
multivalued: false
data:
name: data
description: The non-zero values in the matrix.
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data

View file

@@ -0,0 +1,173 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.4.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
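
The struct-of-arrays point in the DynamicTable description above is easiest to see next to its alternative; a small illustration with toy data:

import pandas as pd

# Struct-of-arrays: one aligned array per column, which is how DynamicTable
# stores data and why column-centric access is cheap.
table = pd.DataFrame({"id": [0, 1, 2], "rate": [1.5, 2.0, 0.7], "label": ["a", "b", "c"]})

# Array-of-structs: one record per row, closer to a compound dtype and
# better suited to row-centric access.
rows = table.to_dict(orient="records")
assert rows[1] == {"id": 1, "rate": 2.0, "label": "b"}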

View file

@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.4.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/

View file

@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
-value: core
+value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:

View file

@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.5.1
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true

View file

@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any

View file

@@ -0,0 +1,68 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.5.1
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
required: true
multivalued: true
indices:
name: indices
description: The column indices.
array:
dimensions:
- alias: number_of_non_zero_values
range: uint
required: true
multivalued: false
indptr:
name: indptr
description: The row index pointer.
array:
dimensions:
- alias: number_of_rows_in_the_matrix_1
range: uint
required: true
multivalued: false
data:
name: data
description: The non-zero values in the matrix.
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data

View file

@@ -0,0 +1,192 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.5.1
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
description: DynamicTable container that supports storing a collection of sub-tables.
Each sub-table is a DynamicTable itself that is aligned with the main table
by row index. I.e., all DynamicTables stored in this group MUST have the same
number of rows. This type effectively defines a 2-level table in which the main
data is stored in the main table implemented by this type and additional columns
of the table are grouped into categories, with each category being represented
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: DynamicTable
tree_root: true
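
AlignedDynamicTable, defined above, groups whole sub-tables as categories that share a row index. A rough pandas analogue of that two-level layout (category and column names are invented for the example):

import pandas as pd

# Two category sub-tables with the same number of rows, as required above.
intracellular = pd.DataFrame({"baseline": [0.1, 0.2], "response": [1.0, 1.2]})
stimulus = pd.DataFrame({"amplitude": [5.0, 7.5]})

# Concatenating along columns with category names as the top level gives the
# "2-level table" the description talks about.
aligned = pd.concat({"intracellular": intracellular, "stimulus": stimulus}, axis=1)
assert aligned[("stimulus", "amplitude")].tolist() == [5.0, 7.5]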

View file

@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.5.1
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/

View file

@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.6.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true

View file

@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any

View file

@@ -0,0 +1,68 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.6.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
required: true
multivalued: true
indices:
name: indices
description: The column indices.
array:
dimensions:
- alias: number_of_non_zero_values
range: uint
required: true
multivalued: false
indptr:
name: indptr
description: The row index pointer.
array:
dimensions:
- alias: number_of_rows_in_the_matrix_1
range: uint
required: true
multivalued: false
data:
name: data
description: The non-zero values in the matrix.
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data

View file

@@ -0,0 +1,192 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.6.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
description: DynamicTable container that supports storing a collection of sub-tables.
Each sub-table is a DynamicTable itself that is aligned with the main table
by row index. I.e., all DynamicTables stored in this group MUST have the same
number of rows. This type effectively defines a 2-level table in which the main
data is stored in the main table implemented by this type and additional columns
of the table are grouped into categories, with each category being represented
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: DynamicTable
tree_root: true

View file

@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.6.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/

View file

@@ -0,0 +1,46 @@
name: hdmf-common.base
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.base
version: 1.7.0
imports:
- hdmf-common.nwb.language
default_prefix: hdmf-common.base/
classes:
Data:
name: Data
description: An abstract data type for a dataset.
attributes:
name:
name: name
range: string
required: true
tree_root: true
Container:
name: Container
description: An abstract data type for a group storing collections of data and
metadata. Base type for all data and metadata containers.
attributes:
name:
name: name
range: string
required: true
tree_root: true
SimpleMultiContainer:
name: SimpleMultiContainer
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: Container
tree_root: true

View file

@@ -0,0 +1,94 @@
name: hdmf-common.nwb.language
annotations:
is_namespace:
tag: is_namespace
value: 'False'
namespace:
tag: namespace
value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
- linkml:types
prefixes:
linkml:
prefix_prefix: linkml
prefix_reference: https://w3id.org/linkml
default_prefix: nwb.language/
types:
float32:
name: float32
typeof: float
float64:
name: float64
typeof: double
long:
name: long
typeof: integer
int64:
name: int64
typeof: integer
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
int16:
name: int16
typeof: integer
short:
name: short
typeof: integer
int8:
name: int8
typeof: integer
uint:
name: uint
typeof: integer
minimum_value: 0
uint32:
name: uint32
typeof: integer
minimum_value: 0
uint16:
name: uint16
typeof: integer
minimum_value: 0
uint8:
name: uint8
typeof: integer
minimum_value: 0
uint64:
name: uint64
typeof: integer
minimum_value: 0
numeric:
name: numeric
typeof: float
text:
name: text
typeof: string
utf:
name: utf
typeof: string
utf8:
name: utf8
typeof: string
utf_8:
name: utf_8
typeof: string
ascii:
name: ascii
typeof: string
bool:
name: bool
typeof: boolean
isodatetime:
name: isodatetime
typeof: datetime
classes:
AnyType:
name: AnyType
description: Needed because some classes in hdmf-common are datasets without dtype
class_uri: linkml:Any

View file

@@ -0,0 +1,68 @@
name: hdmf-common.sparse
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.sparse
version: 1.7.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.sparse/
classes:
CSRMatrix:
name: CSRMatrix
description: A compressed sparse row matrix. Data are stored in the standard CSR
format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
is_a: Container
attributes:
name:
name: name
range: string
required: true
shape:
name: shape
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
required: true
multivalued: true
indices:
name: indices
description: The column indices.
array:
dimensions:
- alias: number_of_non_zero_values
range: uint
required: true
multivalued: false
indptr:
name: indptr
description: The row index pointer.
array:
dimensions:
- alias: number_of_rows_in_the_matrix_1
range: uint
required: true
multivalued: false
data:
name: data
description: The non-zero values in the matrix.
range: CSRMatrix__data
required: true
multivalued: false
tree_root: true
CSRMatrix__data:
name: CSRMatrix__data
description: The non-zero values in the matrix.
attributes:
name:
name: name
ifabsent: string(data)
range: string
required: true
equals_string: data

View file

@@ -0,0 +1,192 @@
name: hdmf-common.table
annotations:
is_namespace:
tag: is_namespace
value: false
namespace:
tag: namespace
value: hdmf-common
id: hdmf-common.table
version: 1.7.0
imports:
- hdmf-common.base
- hdmf-common.nwb.language
default_prefix: hdmf-common.table/
classes:
VectorData:
name: VectorData
description: An n-dimensional dataset representing a column of a DynamicTable.
If used without an accompanying VectorIndex, first dimension is along the rows
of the DynamicTable and each step along the first dimension is a cell of the
larger table. VectorData can also be used to represent a ragged array if paired
with a VectorIndex. This allows for storing arrays of varying length in a single
cell of the DynamicTable by indexing into this VectorData. The first vector
is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
and so on.
is_a: Data
attributes:
name:
name: name
range: string
required: true
description:
name: description
description: Description of what these vectors represent.
range: text
required: true
value:
name: value
range: AnyType
any_of:
- array:
dimensions:
- alias: dim0
- array:
dimensions:
- alias: dim0
- alias: dim1
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- array:
dimensions:
- alias: dim0
- alias: dim1
- alias: dim2
- alias: dim3
tree_root: true
VectorIndex:
name: VectorIndex
description: Used with VectorData to encode a ragged array. An array of indices
into the first dimension of the target VectorData, and forming a map between
the rows of a DynamicTable and the indices of the VectorData. The name of the
VectorIndex is expected to be the name of the target VectorData object followed
by "_index".
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
target:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
description: A list of unique identifiers for values within a dataset, e.g. rows
of a DynamicTable.
is_a: Data
attributes:
name:
name: name
ifabsent: string(element_id)
range: string
required: true
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion
description: DynamicTableRegion provides a link from one table to an index or
region of another. The `table` attribute is a link to another `DynamicTable`,
indicating which table is referenced, and the data is int(s) indicating the
row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
associate rows with repeated meta-data without data duplication. They can also
be used to create hierarchical relationships between multiple `DynamicTable`s.
`DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
ragged references, so a single cell of a `DynamicTable` can reference many rows
of another `DynamicTable`.
is_a: VectorData
attributes:
name:
name: name
range: string
required: true
table:
name: table
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
required: true
description:
name: description
description: Description of what this table region points to.
range: text
required: true
tree_root: true
DynamicTable:
name: DynamicTable
description: A group containing multiple datasets that are aligned on the first
dimension (Currently, this requirement is left up to APIs to check and enforce).
These datasets represent different columns in the table. Apart from a column
that contains unique identifiers for each row, there are no other required datasets.
Users are free to add any number of custom VectorData objects (columns) here.
DynamicTable also supports ragged array columns, where each element can be of
a different size. To add a ragged array column, use a VectorIndex type to index
the corresponding VectorData type. See documentation for VectorData and VectorIndex
for more details. Unlike a compound data type, which is analogous to storing
an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
This provides an alternative structure to choose from when optimizing storage
for anticipated access patterns. Additionally, this type provides a way of creating
a table without having to define a compound type up front. Although this convenience
may be attractive, users should think carefully about how data will be accessed.
DynamicTable is more appropriate for column-centric access, whereas a dataset
with a compound type would be more appropriate for row-centric access. Finally,
data size should also be taken into account. For small tables, performance loss
may be an acceptable trade-off for the flexibility of a DynamicTable.
is_a: Container
attributes:
name:
name: name
range: string
required: true
colnames:
name: colnames
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
required: true
multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
array:
dimensions:
- alias: num_rows
range: int
required: true
multivalued: false
vector_data:
name: vector_data
description: Vector columns, including index columns, of this dynamic table.
range: VectorData
required: false
multivalued: true
tree_root: true
AlignedDynamicTable:
name: AlignedDynamicTable
description: DynamicTable container that supports storing a collection of sub-tables.
Each sub-table is a DynamicTable itself that is aligned with the main table
by row index. I.e., all DynamicTables stored in this group MUST have the same
number of rows. This type effectively defines a 2-level table in which the main
data is stored in the main table implemented by this type and additional columns
of the table are grouped into categories, with each category being represented
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
value:
name: value
multivalued: true
inlined: true
inlined_as_list: false
any_of:
- range: DynamicTable
tree_root: true

View file

@@ -0,0 +1,17 @@
name: hdmf-common
annotations:
is_namespace:
tag: is_namespace
value: true
namespace:
tag: namespace
value: hdmf-common
description: Common data structures provided by HDMF
id: hdmf-common
version: 1.7.0
imports:
- hdmf-common.base
- hdmf-common.table
- hdmf-common.sparse
- hdmf-common.nwb.language
default_prefix: hdmf-common/

View file

@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
-value: core
+value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:

View file

@@ -9,7 +9,7 @@ annotations:
id: hdmf-experimental.experimental
version: 0.1.0
imports:
-- ../../hdmf_common/v1_5_0/namespace
+- ../../hdmf_common/v1_4_0/namespace
- hdmf-experimental.nwb.language
default_prefix: hdmf-experimental.experimental/
classes:

View file

@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
-value: core
+value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:

Some files were not shown because too many files have changed in this diff.