diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index fdad9a8..7f8fc5e 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -2,6 +2,8 @@ name: Lint
on:
push:
+ branches:
+ - main
pull_request:
branches: [main]
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 1b3002d..ce2fffc 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -2,6 +2,11 @@ name: Tests
on:
push:
+ branches:
+ - main
+ pull_request:
+ branches:
+ - main
jobs:
test:
@@ -34,8 +39,20 @@ jobs:
run: pytest
working-directory: nwb_linkml
- - name: Report coverage
- working-directory: nwb_linkml
- run: "coveralls --service=github"
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Coveralls Parallel
+ uses: coverallsapp/github-action@v2.3.0
+ if: runner.os != 'macOS'
+ with:
+ flag-name: run-${{ join(matrix.*, '-') }}
+ parallel: true
+ debug: true
+
+ finish-coverage:
+ needs: test
+ if: ${{ always() }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Coveralls Finished
+ uses: coverallsapp/github-action@v2.3.0
+ with:
+ parallel-finished: true
\ No newline at end of file
diff --git a/README.md b/README.md
index 899d444..ad3f28c 100644
--- a/README.md
+++ b/README.md
@@ -9,4 +9,4 @@ Translating NWB schema language to linkml
Just submitting to pypi to squat the package name
-[![Coverage Status](https://coveralls.io/repos/github/p2p-ld/nwb-linkml/badge.svg)](https://coveralls.io/github/p2p-ld/nwb-linkml)
\ No newline at end of file
+[![Coverage Status](https://coveralls.io/repos/github/p2p-ld/nwb-linkml/badge.svg)](https://coveralls.io/github/p2p-ld/nwb-linkml)
\ No newline at end of file
diff --git a/docs/index.md b/docs/index.md
index 9ffc483..32ed37e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -284,6 +284,7 @@ api/nwb_linkml/schema/index
meta/todo
meta/changelog
+meta/references
genindex
```
diff --git a/docs/intro/translation.md b/docs/intro/translation.md
index d5c078a..899dfbe 100644
--- a/docs/intro/translation.md
+++ b/docs/intro/translation.md
@@ -20,6 +20,11 @@
### DynamicTable
+```{note}
+See the [DynamicTable](https://hdmf-common-schema.readthedocs.io/en/stable/format_description.html#dynamictable)
+reference docs
+```
+
One of the major special cases in NWB is the use of `DynamicTable` to contain tabular data that
contains columns that are not in the base spec.
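+
+As a sketch (a hypothetical table, not from any real spec), a `DynamicTable` pairs an `id` column of
+row identifiers with `VectorData` columns named in its `colnames` attribute, so a file can carry
+columns the schema never declared:
+
+```yaml
+# hypothetical on-disk layout of a DynamicTable group
+trials:                              # neurodata_type: DynamicTable
+  colnames: [start_time, my_quality_score]
+  id: [0, 1, 2]                      # ElementIdentifiers
+  start_time: [0.0, 1.5, 3.0]        # VectorData column declared in the core spec
+  my_quality_score: [0.9, 0.7, 0.8]  # VectorData column absent from any spec
+```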
@@ -284,8 +289,35 @@ When generating pydantic models we...
There are several different ways to create references between objects in nwb/hdmf:
-- ...
+- [`links`](https://schema-language.readthedocs.io/en/latest/description.html#sec-link-spec) are group-level
+ properties that can reference other groups or datasets like this:
+ ```yaml
+ links:
+ - name: Link name
+ doc: Required string with the description of the link
+ target_type: Type of target
+ quantity: Optional quantity identifier for the group (default=1).
+ ```
+- [Reference `dtype`](https://schema-language.readthedocs.io/en/latest/description.html#reference-dtype)s are
+  dataset- and attribute-level properties that can reference both other objects and regions within other objects:
+ ```yaml
+ dtype:
+ target_type: ElectrodeGroup
+ reftype: object
+ ```
+- `TimeSeriesReferenceVectorData` is a compound dtype that behaves like `VectorData` and `VectorIndex`
+  combined into a single type. It is slightly different in that each row of the vector can refer to a
+  different target `TimeSeries`, and it handles selection differently (with a start index and count
+  rather than a series of indices marking the end of each cell; see the sketch after this list).
+- Implicitly, hdmf creates references between objects according to some naming conventions, e.g.
+ an attribute/dataset that is a `VectorIndex` named `mydata_index` will be linked to a `VectorData`
+ object `mydata`.
+- There is currently a note in the schema language docs that there will be an additional
+ [Relationships](https://schema-language.readthedocs.io/en/latest/description.html#relationships) system
+  that explicitly models relationships, but it is unclear how that would be different from references.
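+
+A sketch of `TimeSeriesReferenceVectorData`'s compound dtype, paraphrased from the NWB core
+schema (`doc` strings shortened here):
+
+```yaml
+dtype:
+- name: idx_start
+  dtype: int32
+  doc: Start index into the data of the referenced TimeSeries
+- name: count
+  dtype: int32
+  doc: Number of samples in the selected region
+- name: timeseries
+  dtype:
+    target_type: TimeSeries
+    reftype: object
+  doc: The referenced TimeSeries
+```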
+
+We represent all of these by referring directly to the object type, preserving the source type
+in an annotation when necessary.
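+
+As a rough sketch of that representation (hypothetical slot and annotation names, not the actual
+generator output), a `link` to an `ElectrodeGroup` might become a slot whose range is the target
+class, with the source kind kept in an annotation:
+
+```yaml
+attributes:
+  electrode_group:
+    range: ElectrodeGroup
+    annotations:
+      source_type: link
+```
+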
## LinkML to Everything
diff --git a/docs/meta/references.md b/docs/meta/references.md
new file mode 100644
index 0000000..dd36a1a
--- /dev/null
+++ b/docs/meta/references.md
@@ -0,0 +1,11 @@
+# References
+
+## Documentation
+
+- [hdmf](https://hdmf.readthedocs.io/en/stable/)
+- [hdmf-common-schema](https://hdmf-common-schema.readthedocs.io/en/stable/)
+- [pynwb](https://pynwb.readthedocs.io/en/latest/)
+
+```{todo}
+Add the BibTeX refs to NWB papers :)
+```
\ No newline at end of file
diff --git a/docs/meta/todo.md b/docs/meta/todo.md
index 6508c62..d2bf9ac 100644
--- a/docs/meta/todo.md
+++ b/docs/meta/todo.md
@@ -7,6 +7,7 @@ NWB schema translation
- handle compound `dtype` like in ophys.PlaneSegmentation.pixel_mask
- handle compound `dtype` like in TimeSeriesReferenceVectorData
- Create a validator that checks if all the lists in a compound dtype dataset are same length
+- [ ] Move making `target` optional in `VectorIndex` from the pydantic generator to the linkml generators!
Cleanup
- [ ] Update pydantic generator
@@ -22,7 +23,7 @@ Cleanup
- [ ] Make a minimal pydanticgen-only package to slim linkml deps?
- [ ] Disambiguate "maps" terminology - split out simple maps from the eg. dataset mapping classes
-- [ ] Remove unnecessary imports
+- [x] Remove unnecessary imports
- dask
- nptyping
- [ ] Adapt the split generation to the new split generator style
diff --git a/docs/pdm.lock b/docs/pdm.lock
index db4920e..0dcaca2 100644
--- a/docs/pdm.lock
+++ b/docs/pdm.lock
@@ -3,9 +3,12 @@
[metadata]
groups = ["default"]
-strategy = ["cross_platform", "inherit_metadata"]
-lock_version = "4.4.2"
-content_hash = "sha256:42dbf7249d28b6d1ad06ccd4593eac866228ad47d884ea8f4b118d5331da2aa0"
+strategy = ["inherit_metadata"]
+lock_version = "4.5.0"
+content_hash = "sha256:6d950420dfea0b5a96435adc77fb4ee411fcada547a1c55e6f1dc74f6ba6b199"
+
+[[metadata.targets]]
+requires_python = ">=3.10,<3.13"
[[package]]
name = "alabaster"
@@ -24,6 +27,9 @@ version = "0.7.0"
requires_python = ">=3.8"
summary = "Reusable constraint types to use with typing.Annotated"
groups = ["default"]
+dependencies = [
+ "typing-extensions>=4.0.0; python_version < \"3.9\"",
+]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -34,6 +40,9 @@ name = "antlr4-python3-runtime"
version = "4.9.3"
summary = "ANTLR 4.9.3 runtime for Python 3.7"
groups = ["default"]
+dependencies = [
+ "typing; python_version < \"3.5\"",
+]
files = [
{file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"},
]
@@ -89,6 +98,7 @@ summary = "Annotate AST trees with source code positions"
groups = ["default"]
dependencies = [
"six>=1.12.0",
+ "typing; python_version < \"3.5\"",
]
files = [
{file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
@@ -97,13 +107,16 @@ files = [
[[package]]
name = "attrs"
-version = "23.2.0"
+version = "24.2.0"
requires_python = ">=3.7"
summary = "Classes Without Boilerplate"
groups = ["default"]
+dependencies = [
+ "importlib-metadata; python_version < \"3.8\"",
+]
files = [
- {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
- {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+ {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
]
[[package]]
@@ -114,6 +127,7 @@ summary = "Seamlessly integrate pydantic models in your Sphinx documentation."
groups = ["default"]
dependencies = [
"Sphinx>=4.0",
+ "importlib-metadata>1; python_version <= \"3.8\"",
"pydantic-settings<3.0.0,>=2.0",
"pydantic<3.0.0,>=2.0",
]
@@ -127,6 +141,9 @@ version = "2.15.0"
requires_python = ">=3.8"
summary = "Internationalization utilities"
groups = ["default"]
+dependencies = [
+ "pytz>=2015.7; python_version < \"3.9\"",
+]
files = [
{file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
{file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
@@ -147,41 +164,35 @@ files = [
]
[[package]]
-name = "blosc2"
-version = "2.7.0"
-requires_python = "<4,>=3.10"
-summary = "Python wrapper for the C-Blosc2 library"
+name = "black"
+version = "24.8.0"
+requires_python = ">=3.8"
+summary = "The uncompromising code formatter."
groups = ["default"]
dependencies = [
- "msgpack",
- "ndindex>=1.4",
- "numexpr",
- "numpy>=1.20.3",
- "py-cpuinfo",
+ "click>=8.0.0",
+ "mypy-extensions>=0.4.3",
+ "packaging>=22.0",
+ "pathspec>=0.9.0",
+ "platformdirs>=2",
+ "tomli>=1.1.0; python_version < \"3.11\"",
+ "typing-extensions>=4.0.1; python_version < \"3.11\"",
]
files = [
- {file = "blosc2-2.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aa71042277956199676169335eb64aa76e33adac5a22289eccdb7d10edf402b6"},
- {file = "blosc2-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:18e3c4c95fe40ea9cda88c784d96e4efc8ddf53f94074cf46daa2e91c9ae5137"},
- {file = "blosc2-2.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ac66ce25214b0b2e53beda9bc6f333dba16f2667649b1026ae041511b5a07d"},
- {file = "blosc2-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:928a89851b8528ce9c233048d832be5b2fef47645d5a389c021f3f58333fa3f8"},
- {file = "blosc2-2.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a9518b7bbaa0f9903a5a921abe6abb0faa56b0e0ad2da0416ff3a486a4b2e0aa"},
- {file = "blosc2-2.7.0-cp310-cp310-win32.whl", hash = "sha256:488dc4be3b6894967a7189952634644f8da46c4bab7734719d379cdf5b440dc0"},
- {file = "blosc2-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:17dd39f62f1686a170232ac8bcba40358ef67e919a91fe840ac71a45d067df30"},
- {file = "blosc2-2.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:565701ad336946a7ef12250def97aae2257de1da34ac8cd570be91b664a03d30"},
- {file = "blosc2-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b640fe2d1d39af2dccffe5e100ef94d21940bfb7f0af44ba17fef718671b267"},
- {file = "blosc2-2.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:634bc22f17ae47a166b8201c77ba11bc160d9997ace51fc820cb3cbd285d47f8"},
- {file = "blosc2-2.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d4b208d5f5947d3062d3353717c43e0ea8e6ccdecdcd30737d5305628e0062b"},
- {file = "blosc2-2.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fd3ca9a61bce4e4dc8006b613fa9dd8982f71e01fa9f593d6cc44d9fdbb56174"},
- {file = "blosc2-2.7.0-cp311-cp311-win32.whl", hash = "sha256:4518944374880d822f9ca90d4473bfa9f4d884b462f78365e224c2b291962e44"},
- {file = "blosc2-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:05d40ede9cf0ecb25500cfe9bebe190e75f246eb1fcd7bd358ac1acfef44ee7a"},
- {file = "blosc2-2.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:729305b06e76b0c95b0ea5090aa7ec87eff72ca43e194283e0cccee92bbdd1e6"},
- {file = "blosc2-2.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64a26c9f7a4a5ddc5721a75b37f913f9e21c0dab96d8c152a64f8faf8659e9ee"},
- {file = "blosc2-2.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:770733ce68d82674d1f80961fe56f3c2d914d8ea4de036af3888a22479add97d"},
- {file = "blosc2-2.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6a700f9324b37e814c5633c43b081c60962f4dd59c0340cefe5f61f9f0411fd"},
- {file = "blosc2-2.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1546c04d25ce793fa0fd7a83999bbb576ff84ef474fb45801f0b6dd76b84803c"},
- {file = "blosc2-2.7.0-cp312-cp312-win32.whl", hash = "sha256:407896867032a760dcce6c25d5e5a56b6fe5235245e065e2549697f69b5117c6"},
- {file = "blosc2-2.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:62d2a6eaf1be1858993a4d7b2b8efd2ede5c4eaabe030c611cd075d907aa5400"},
- {file = "blosc2-2.7.0.tar.gz", hash = "sha256:9b982c1d40560eefb4a01d67c57e786d39a5ee9696f3deadd32ebf5f8885eb2a"},
+ {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
+ {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
+ {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
+ {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
+ {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
+ {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
+ {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
+ {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
+ {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
+ {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
+ {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
+ {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
+ {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
+ {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
]
[[package]]
@@ -197,7 +208,7 @@ files = [
[[package]]
name = "cffi"
-version = "1.16.0"
+version = "1.17.0"
requires_python = ">=3.8"
summary = "Foreign Function Interface for Python calling C code."
groups = ["default"]
@@ -206,39 +217,42 @@ dependencies = [
"pycparser",
]
files = [
- {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
- {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
- {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
- {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
- {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
- {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
- {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
- {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
- {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
- {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+ {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"},
+ {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"},
+ {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"},
+ {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"},
+ {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"},
+ {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"},
+ {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"},
+ {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"},
+ {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"},
+ {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"},
+ {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"},
+ {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"},
+ {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"},
+ {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"},
+ {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"},
+ {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"},
+ {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"},
+ {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"},
+ {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"},
+ {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"},
]
[[package]]
@@ -328,23 +342,13 @@ summary = "Composable command line interface toolkit"
groups = ["default"]
dependencies = [
"colorama; platform_system == \"Windows\"",
+ "importlib-metadata; python_version < \"3.8\"",
]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
-[[package]]
-name = "cloudpickle"
-version = "3.0.0"
-requires_python = ">=3.8"
-summary = "Pickler class to extend the standard pickle.Pickler functionality"
-groups = ["default"]
-files = [
- {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"},
- {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"},
-]
-
[[package]]
name = "colorama"
version = "0.4.6"
@@ -372,7 +376,7 @@ files = [
[[package]]
name = "curies"
-version = "0.7.9"
+version = "0.7.10"
requires_python = ">=3.8"
summary = "Idiomatic conversion between URIs and compact URIs (CURIEs)."
groups = ["default"]
@@ -382,52 +386,31 @@ dependencies = [
"requests",
]
files = [
- {file = "curies-0.7.9-py3-none-any.whl", hash = "sha256:e4c5beb91642376953c94db0ee2fb5d2b011c3b16749516436114ba61442f260"},
- {file = "curies-0.7.9.tar.gz", hash = "sha256:3b63c5fea7b0e967629a3a384b1a8c59b56c503487c1dcbacddeab59e25db4d8"},
-]
-
-[[package]]
-name = "dask"
-version = "2024.7.0"
-requires_python = ">=3.9"
-summary = "Parallel PyData with Task Scheduling"
-groups = ["default"]
-dependencies = [
- "click>=8.1",
- "cloudpickle>=1.5.0",
- "fsspec>=2021.09.0",
- "importlib-metadata>=4.13.0; python_version < \"3.12\"",
- "packaging>=20.0",
- "partd>=1.4.0",
- "pyyaml>=5.3.1",
- "toolz>=0.10.0",
-]
-files = [
- {file = "dask-2024.7.0-py3-none-any.whl", hash = "sha256:0f30f218a1fe1c8e9a6ba8add1207088ba9ff049098d4ea4ce045fd5ff7ca914"},
- {file = "dask-2024.7.0.tar.gz", hash = "sha256:0060bae9a58b5b3ce7e0d97040e903b4d3db09ba49222101cfc40f9834a8a6bc"},
+ {file = "curies-0.7.10-py3-none-any.whl", hash = "sha256:ad80f420dd76b6f3e921a245370ff6ab7473c48c29c17254970c03cd2e58af5f"},
+ {file = "curies-0.7.10.tar.gz", hash = "sha256:98a7ceb94710fab3a02727a7f85ba0719dd22be5fc8b5f2ad1d7d4cfc47d64ce"},
]
[[package]]
name = "debugpy"
-version = "1.8.2"
+version = "1.8.5"
requires_python = ">=3.8"
summary = "An implementation of the Debug Adapter Protocol for Python"
groups = ["default"]
files = [
- {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"},
- {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"},
- {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"},
- {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"},
- {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"},
- {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"},
- {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"},
- {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"},
- {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"},
- {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"},
- {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"},
- {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"},
- {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"},
- {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"},
+ {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"},
+ {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"},
+ {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"},
+ {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"},
+ {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"},
+ {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"},
+ {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"},
+ {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"},
+ {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"},
+ {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"},
+ {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"},
+ {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"},
+ {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"},
+ {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"},
]
[[package]]
@@ -479,14 +462,14 @@ files = [
[[package]]
name = "exceptiongroup"
-version = "1.2.1"
+version = "1.2.2"
requires_python = ">=3.7"
summary = "Backport of PEP 654 (exception groups)"
groups = ["default"]
marker = "python_version < \"3.11\""
files = [
- {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
- {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"},
+ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]
[[package]]
@@ -516,25 +499,17 @@ version = "1.5.1"
requires_python = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
summary = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
groups = ["default"]
+dependencies = [
+ "cached-property>=1.3.0; python_version < \"3.8\"",
+]
files = [
{file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"},
{file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
]
-[[package]]
-name = "fsspec"
-version = "2024.6.1"
-requires_python = ">=3.8"
-summary = "File-system specification"
-groups = ["default"]
-files = [
- {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"},
- {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"},
-]
-
[[package]]
name = "furo"
-version = "2024.5.6"
+version = "2024.8.6"
requires_python = ">=3.8"
summary = "A clean customisable Sphinx documentation theme."
groups = ["default"]
@@ -542,11 +517,11 @@ dependencies = [
"beautifulsoup4",
"pygments>=2.7",
"sphinx-basic-ng>=1.0.0.beta2",
- "sphinx<8.0,>=6.0",
+ "sphinx<9.0,>=6.0",
]
files = [
- {file = "furo-2024.5.6-py3-none-any.whl", hash = "sha256:490a00d08c0a37ecc90de03ae9227e8eb5d6f7f750edf9807f398a2bdf2358de"},
- {file = "furo-2024.5.6.tar.gz", hash = "sha256:81f205a6605ebccbb883350432b4831c0196dd3d1bc92f61e1f459045b3d2b0b"},
+ {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"},
+ {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"},
]
[[package]]
@@ -604,6 +579,9 @@ version = "0.14.0"
requires_python = ">=3.7"
summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
groups = ["default"]
+dependencies = [
+ "typing-extensions; python_version < \"3.8\"",
+]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -669,16 +647,17 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "8.0.0"
+version = "8.2.0"
requires_python = ">=3.8"
summary = "Read metadata from Python packages"
groups = ["default"]
dependencies = [
+ "typing-extensions>=3.6.4; python_version < \"3.8\"",
"zipp>=0.5",
]
files = [
- {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"},
- {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"},
+ {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"},
+ {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"},
]
[[package]]
@@ -900,7 +879,9 @@ summary = "An implementation of JSON Schema validation for Python"
groups = ["default"]
dependencies = [
"attrs>=22.2.0",
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
"jsonschema-specifications>=2023.03.6",
+ "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"",
"referencing>=0.28.4",
"rpds-py>=0.7.1",
]
@@ -916,6 +897,7 @@ requires_python = ">=3.8"
summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
groups = ["default"]
dependencies = [
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
"referencing>=0.31.0",
]
files = [
@@ -974,6 +956,7 @@ requires_python = ">=3.8"
summary = "Jupyter protocol implementation and client libraries"
groups = ["default"]
dependencies = [
+ "importlib-metadata>=4.8.3; python_version < \"3.10\"",
"jupyter-core!=5.0.*,>=4.12",
"python-dateutil>=2.8.2",
"pyzmq>=23.0",
@@ -1017,8 +1000,8 @@ name = "linkml"
version = "0.0.0"
requires_python = "<4.0.0,>=3.8.1"
git = "https://github.com/sneakers-the-rat/linkml"
-ref = "arrays-numpydantic"
-revision = "b70daae67170c5a5e321b2aa24a2db4237c87e4f"
+ref = "nwb-linkml"
+revision = "0a6578bff4713688260f64b3076b197bd6decce9"
summary = "Linked Open Data Modeling Language"
groups = ["default"]
dependencies = [
@@ -1045,6 +1028,7 @@ dependencies = [
"rdflib>=6.0.0",
"requests>=2.22",
"sqlalchemy>=1.4.31",
+ "typing-extensions>=4.4.0; python_version < \"3.9\"",
"watchdog>=0.9.0",
]
@@ -1092,17 +1076,6 @@ files = [
{file = "linkml_runtime-1.8.0.tar.gz", hash = "sha256:436381a7bf791e9af4ef0a5adcac86762d451b77670fbdb3ba083d2c177fb5f2"},
]
-[[package]]
-name = "locket"
-version = "1.0.0"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-summary = "File-based locks for Python on Linux and Windows"
-groups = ["default"]
-files = [
- {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"},
- {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"},
-]
-
[[package]]
name = "markdown-it-py"
version = "3.0.0"
@@ -1197,46 +1170,14 @@ files = [
]
[[package]]
-name = "msgpack"
-version = "1.0.8"
-requires_python = ">=3.8"
-summary = "MessagePack serializer"
+name = "mypy-extensions"
+version = "1.0.0"
+requires_python = ">=3.5"
+summary = "Type system extensions for programs checked with the mypy type checker."
groups = ["default"]
files = [
- {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"},
- {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"},
- {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"},
- {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"},
- {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"},
- {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"},
- {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"},
- {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"},
- {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"},
- {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"},
- {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"},
- {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"},
- {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"},
- {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"},
- {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"},
- {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"},
- {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"},
- {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"},
- {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"},
- {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"},
- {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"},
- {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"},
- {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"},
- {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"},
- {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"},
- {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"},
- {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"},
- {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"},
- {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"},
- {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"},
- {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"},
- {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"},
- {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"},
- {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"},
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
@@ -1262,21 +1203,21 @@ dependencies = [
[[package]]
name = "myst-parser"
-version = "3.0.1"
-requires_python = ">=3.8"
+version = "4.0.0"
+requires_python = ">=3.10"
summary = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser,"
groups = ["default"]
dependencies = [
- "docutils<0.22,>=0.18",
+ "docutils<0.22,>=0.19",
"jinja2",
"markdown-it-py~=3.0",
- "mdit-py-plugins~=0.4",
+ "mdit-py-plugins>=0.4.1,~=0.4",
"pyyaml",
- "sphinx<8,>=6",
+ "sphinx<9,>=7",
]
files = [
- {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"},
- {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"},
+ {file = "myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d"},
+ {file = "myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531"},
]
[[package]]
@@ -1313,17 +1254,6 @@ files = [
{file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"},
]
-[[package]]
-name = "ndindex"
-version = "1.8"
-requires_python = ">=3.8"
-summary = "A Python library for manipulating indices of ndarrays."
-groups = ["default"]
-files = [
- {file = "ndindex-1.8-py3-none-any.whl", hash = "sha256:b5132cd331f3e4106913ed1a974a3e355967a5991543c2f512b40cb8bb9f50b8"},
- {file = "ndindex-1.8.tar.gz", hash = "sha256:5fc87ebc784605f01dd5367374cb40e8da8f2c30988968990066c5098a7eebe8"},
-]
-
[[package]]
name = "nest-asyncio"
version = "1.6.0"
@@ -1335,105 +1265,60 @@ files = [
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
]
-[[package]]
-name = "nptyping"
-version = "2.5.0"
-requires_python = ">=3.7"
-summary = "Type hints for NumPy."
-groups = ["default"]
-dependencies = [
- "numpy<2.0.0,>=1.20.0; python_version >= \"3.8\"",
-]
-files = [
- {file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"},
- {file = "nptyping-2.5.0.tar.gz", hash = "sha256:e3d35b53af967e6fb407c3016ff9abae954d3a0568f7cc13a461084224e8e20a"},
-]
-
-[[package]]
-name = "numexpr"
-version = "2.10.1"
-requires_python = ">=3.9"
-summary = "Fast numerical expression evaluator for NumPy"
-groups = ["default"]
-dependencies = [
- "numpy>=1.23.0",
-]
-files = [
- {file = "numexpr-2.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bbd35f17f6efc00ebd4a480192af1ee30996094a0d5343b131b0e90e61e8b554"},
- {file = "numexpr-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fecdf4bf3c1250e56583db0a4a80382a259ba4c2e1efa13e04ed43f0938071f5"},
- {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2efa499f460124538a5b4f1bf2e77b28eb443ee244cc5573ed0f6a069ebc635"},
- {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac23a72eff10f928f23b147bdeb0f1b774e862abe332fc9bf4837e9f1bc0bbf9"},
- {file = "numexpr-2.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b28eaf45f1cc1048aad9e90e3a8ada1aef58c5f8155a85267dc781b37998c046"},
- {file = "numexpr-2.10.1-cp310-cp310-win32.whl", hash = "sha256:4f0985bd1c493b23b5aad7d81fa174798f3812efb78d14844194834c9fee38b8"},
- {file = "numexpr-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:44f6d12a8c44be90199bbb10d3abf467f88951f48a3d1fbbd3c219d121f39c9d"},
- {file = "numexpr-2.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3c0b0bf165b2d886eb981afa4e77873ca076f5d51c491c4d7b8fc10f17c876f"},
- {file = "numexpr-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56648a04679063175681195670ad53e5c8ca19668166ed13875199b5600089c7"},
- {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce04ae6efe2a9d0be1a0e114115c3ae70c68b8b8fbc615c5c55c15704b01e6a4"},
- {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:45f598182b4f5c153222e47d5163c3bee8d5ebcaee7e56dd2a5898d4d97e4473"},
- {file = "numexpr-2.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6a50370bea77ba94c3734a44781c716751354c6bfda2d369af3aed3d67d42871"},
- {file = "numexpr-2.10.1-cp311-cp311-win32.whl", hash = "sha256:fa4009d84a8e6e21790e718a80a22d57fe7f215283576ef2adc4183f7247f3c7"},
- {file = "numexpr-2.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:fcbf013bb8494e8ef1d11fa3457827c1571c6a3153982d709e5d17594999d4dd"},
- {file = "numexpr-2.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82fc95c301b15ff4823f98989ee363a2d5555d16a7cfd3710e98ddee726eaaaa"},
- {file = "numexpr-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbf79fef834f88607f977ab9867061dcd9b40ccb08bb28547c6dc6c73e560895"},
- {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:552c8d4b2e3b87cdb2abb40a781b9a61a9090a9f66ac7357fc5a0b93aff76be3"},
- {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22cc65e9121aeb3187a2b50827715b2b087ea70e8ab21416ea52662322087b43"},
- {file = "numexpr-2.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:00204e5853713b5eba5f3d0bc586a5d8d07f76011b597c8b4087592cc2ec2928"},
- {file = "numexpr-2.10.1-cp312-cp312-win32.whl", hash = "sha256:82bf04a1495ac475de4ab49fbe0a3a2710ed3fd1a00bc03847316b5d7602402d"},
- {file = "numexpr-2.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:300e577b3c006dd7a8270f1bb2e8a00ee15bf235b1650fe2a6febec2954bc2c3"},
- {file = "numexpr-2.10.1.tar.gz", hash = "sha256:9bba99d354a65f1a008ab8b87f07d84404c668e66bab624df5b6b5373403cf81"},
-]
-
[[package]]
name = "numpy"
-version = "1.26.4"
+version = "2.0.1"
requires_python = ">=3.9"
summary = "Fundamental package for array computing in Python"
groups = ["default"]
files = [
- {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
- {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
- {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
- {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
- {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
- {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
- {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
- {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
- {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
- {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
- {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
- {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
- {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
- {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
- {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
- {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
- {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
- {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
- {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
- {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
- {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
- {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
- {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
- {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
- {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"},
+ {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"},
+ {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"},
+ {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"},
+ {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"},
+ {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"},
+ {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"},
+ {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"},
+ {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"},
+ {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"},
+ {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"},
+ {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"},
+ {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"},
+ {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"},
+ {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"},
+ {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"},
+ {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"},
+ {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"},
+ {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"},
+ {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"},
]

[[package]]
name = "numpydantic"
-version = "1.2.1"
+version = "1.3.0"
requires_python = "<4.0,>=3.9"
summary = "Type and shape validation and serialization for numpy arrays in pydantic models"
groups = ["default"]
dependencies = [
- "nptyping>=2.5.0",
"numpy>=1.24.0",
"pydantic>=2.3.0",
+ "typing-extensions>=4.11.0; python_version < \"3.11\"",
]
files = [
- {file = "numpydantic-1.2.1-py3-none-any.whl", hash = "sha256:e21d7e272410b3a2013d2a6aeec2ed6efd13ea171b0200e2029d7c2f1453def0"},
- {file = "numpydantic-1.2.1.tar.gz", hash = "sha256:d8a3e7371d78b99fa4a4733a5b873046f064993431ae63f97edcf9bda4dd5c7f"},
+ {file = "numpydantic-1.3.0-py3-none-any.whl", hash = "sha256:bda3aa2cd858e9211006be8b8e589e1905b2c6a2db17cec0c28563ba1ad66b68"},
+ {file = "numpydantic-1.3.0.tar.gz", hash = "sha256:b3931d51ba7e22d48bdd2ae56cad368f63db99ef74e8570021a7fd176b2ffc1f"},
]

[[package]]
@@ -1444,14 +1329,13 @@ path = "../nwb_linkml"
summary = "Translating NWB schema language to LinkML"
groups = ["default"]
dependencies = [
- "blosc2>=2.2.7",
- "dask>=2023.9.2",
+ "black>=24.4.2",
"h5py>=3.9.0",
- "linkml @ git+https://github.com/sneakers-the-rat/linkml@arrays-numpydantic",
+ "linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml",
"linkml-runtime>=1.7.7",
- "nptyping>=2.5.0",
- "numpydantic>=1.2.1",
+ "numpydantic>=1.3.0",
"nwb-schema-language>=0.1.3",
+ "pandas>=2.2.2",
"pydantic-settings>=2.0.3",
"pydantic>=2.3.0",
"pyyaml>=6.0",
@@ -1497,6 +1381,45 @@ files = [
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]

+[[package]]
+name = "pandas"
+version = "2.2.2"
+requires_python = ">=3.9"
+summary = "Powerful data structures for data analysis, time series, and statistics"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.22.4; python_version < \"3.11\"",
+ "numpy>=1.23.2; python_version == \"3.11\"",
+ "numpy>=1.26.0; python_version >= \"3.12\"",
+ "python-dateutil>=2.8.2",
+ "pytz>=2020.1",
+ "tzdata>=2022.7",
+]
+files = [
+ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
+ {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
+ {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
+ {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
+ {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
+ {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
+]
+
[[package]]
name = "parse"
version = "1.20.2"
@@ -1519,18 +1442,14 @@ files = [
]

[[package]]
-name = "partd"
-version = "1.4.2"
-requires_python = ">=3.9"
-summary = "Appendable key-value storage"
+name = "pathspec"
+version = "0.12.1"
+requires_python = ">=3.8"
+summary = "Utility library for gitignore style pattern matching of file paths."
groups = ["default"]
-dependencies = [
- "locket",
- "toolz",
-]
files = [
- {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"},
- {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"},
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]

[[package]]
@@ -1598,7 +1517,7 @@ files = [
[[package]]
name = "prefixmaps"
-version = "0.2.4"
+version = "0.2.5"
requires_python = "<4.0,>=3.8"
summary = "A python library for retrieving semantic prefix maps"
groups = ["default"]
@@ -1607,8 +1526,8 @@ dependencies = [
"pyyaml>=5.3.1",
]
files = [
- {file = "prefixmaps-0.2.4-py3-none-any.whl", hash = "sha256:89bf0e6fb08c276f754f9624c42adf2e87c64ee92a3dde1f7eff01f22d85b512"},
- {file = "prefixmaps-0.2.4.tar.gz", hash = "sha256:ae86a1b31189d0516d199756d5808f75f44b39e86546c356cc78c0fe8d2078af"},
+ {file = "prefixmaps-0.2.5-py3-none-any.whl", hash = "sha256:68caa04b3a6a8e058aa1c55affe32c62e44b564d031d63f768e267b796a1f3ee"},
+ {file = "prefixmaps-0.2.5.tar.gz", hash = "sha256:aaccd2425ade2ea97a502c58be49fe8f3536e3d5e919712ae0358a39fc800799"},
]

[[package]]
@@ -1655,22 +1574,12 @@ files = [
[[package]]
name = "pure-eval"
-version = "0.2.2"
+version = "0.2.3"
summary = "Safely evaluate AST nodes without side effects"
groups = ["default"]
files = [
- {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
- {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
-]
-
-[[package]]
-name = "py-cpuinfo"
-version = "9.0.0"
-summary = "Get CPU info with pure Python"
-groups = ["default"]
-files = [
- {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
- {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
+ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
+ {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
]

[[package]]
@@ -1694,6 +1603,7 @@ groups = ["default"]
dependencies = [
"annotated-types>=0.4.0",
"pydantic-core==2.20.1",
+ "typing-extensions>=4.12.2; python_version >= \"3.13\"",
"typing-extensions>=4.6.1; python_version < \"3.13\"",
]
files = [
@@ -1755,20 +1665,12 @@ files = [
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
]

[[package]]
name = "pydantic-settings"
-version = "2.3.4"
+version = "2.4.0"
requires_python = ">=3.8"
summary = "Settings management using Pydantic"
groups = ["default"]
@@ -1777,8 +1679,8 @@ dependencies = [
"python-dotenv>=0.21.0",
]
files = [
- {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"},
- {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"},
+ {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"},
+ {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"},
]

[[package]]
@@ -1860,7 +1762,7 @@ files = [
[[package]]
name = "pytest"
-version = "8.2.2"
+version = "8.3.2"
requires_python = ">=3.8"
summary = "pytest: simple powerful testing with Python"
groups = ["default"]
@@ -1869,12 +1771,12 @@ dependencies = [
"exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
"iniconfig",
"packaging",
- "pluggy<2.0,>=1.5",
+ "pluggy<2,>=1.5",
"tomli>=1; python_version < \"3.11\"",
]
files = [
- {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
- {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
+ {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
+ {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
]

[[package]]
@@ -1927,6 +1829,16 @@ files = [
{file = "PyTrie-0.4.0.tar.gz", hash = "sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379"},
]

+[[package]]
+name = "pytz"
+version = "2024.1"
+summary = "World timezone definitions, modern and historical"
+groups = ["default"]
+files = [
+ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
[[package]]
name = "pywin32"
version = "306"
@@ -1946,40 +1858,44 @@ files = [
[[package]]
name = "pyyaml"
-version = "6.0.1"
-requires_python = ">=3.6"
+version = "6.0.2"
+requires_python = ">=3.8"
summary = "YAML parser and emitter for Python"
groups = ["default"]
files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]

[[package]]
name = "pyzmq"
-version = "26.0.3"
+version = "26.1.0"
requires_python = ">=3.7"
summary = "Python bindings for 0MQ"
groups = ["default"]
@@ -1987,64 +1903,48 @@ dependencies = [
"cffi; implementation_name == \"pypy\"",
]
files = [
- {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"},
- {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"},
- {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"},
- {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"},
- {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"},
- {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"},
- {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"},
- {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"},
- {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"},
- {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"},
- {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"},
- {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"},
- {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"},
- {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"},
- {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"},
- {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"},
- {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"},
- {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"},
- {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"},
- {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"},
- {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"},
- {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"},
- {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"},
- {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"},
- {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"},
- {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"},
- {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"},
- {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"},
- {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"},
- {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"},
- {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"},
- {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"},
- {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"},
- {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"},
- {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"},
- {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"},
- {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"},
- {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"},
- {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"},
- {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"},
- {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"},
- {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"},
- {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"},
- {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"},
- {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"},
- {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"},
- {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"},
- {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"},
- {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"},
- {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"},
- {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"},
- {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"},
- {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"},
+ {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"},
+ {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"},
+ {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"},
+ {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"},
+ {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"},
+ {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"},
+ {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"},
+ {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"},
+ {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"},
+ {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"},
+ {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"},
+ {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"},
+ {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"},
+ {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"},
+ {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"},
+ {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"},
+ {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"},
+ {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"},
+ {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"},
+ {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"},
+ {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"},
+ {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"},
+ {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"},
+ {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"},
+ {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"},
+ {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"},
+ {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"},
+ {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"},
+ {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"},
+ {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"},
+ {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"},
+ {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"},
+ {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"},
+ {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"},
+ {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"},
+ {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"},
+ {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"},
+ {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"},
+ {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"},
+ {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"},
+ {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"},
+ {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"},
]

[[package]]
@@ -2155,6 +2055,7 @@ groups = ["default"]
dependencies = [
"markdown-it-py>=2.2.0",
"pygments<3.0.0,>=2.13.0",
+ "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"",
]
files = [
{file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
@@ -2163,84 +2064,63 @@ files = [
[[package]]
name = "rpds-py"
-version = "0.19.0"
+version = "0.20.0"
requires_python = ">=3.8"
summary = "Python bindings to Rust's persistent data structures (rpds)"
groups = ["default"]
files = [
- {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"},
- {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"},
- {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"},
- {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"},
- {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"},
- {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"},
- {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"},
- {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"},
- {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"},
- {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"},
- {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"},
- {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"},
- {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"},
- {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"},
- {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"},
- {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"},
- {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"},
- {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"},
- {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"},
- {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"},
- {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"},
- {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"},
- {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"},
- {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"},
- {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"},
- {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"},
- {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"},
- {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
+ {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
+ {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
+ {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
+ {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
+ {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
+ {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
+ {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
]
[[package]]
@@ -2294,13 +2174,13 @@ files = [
[[package]]
name = "setuptools"
-version = "70.2.0"
+version = "72.1.0"
requires_python = ">=3.8"
summary = "Easily download, build, install, upgrade, and uninstall Python packages"
groups = ["default"]
files = [
- {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"},
- {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"},
+ {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"},
+ {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"},
]
[[package]]
@@ -2401,21 +2281,22 @@ files = [
[[package]]
name = "sphinx"
-version = "7.3.7"
+version = "7.4.7"
requires_python = ">=3.9"
summary = "Python documentation generator"
groups = ["default"]
dependencies = [
- "Jinja2>=3.0",
- "Pygments>=2.14",
+ "Jinja2>=3.1",
+ "Pygments>=2.17",
"alabaster~=0.7.14",
- "babel>=2.9",
- "colorama>=0.4.5; sys_platform == \"win32\"",
- "docutils<0.22,>=0.18.1",
+ "babel>=2.13",
+ "colorama>=0.4.6; sys_platform == \"win32\"",
+ "docutils<0.22,>=0.20",
"imagesize>=1.3",
- "packaging>=21.0",
- "requests>=2.25.0",
- "snowballstemmer>=2.0",
+ "importlib-metadata>=6.0; python_version < \"3.10\"",
+ "packaging>=23.0",
+ "requests>=2.30.0",
+ "snowballstemmer>=2.2",
"sphinxcontrib-applehelp",
"sphinxcontrib-devhelp",
"sphinxcontrib-htmlhelp>=2.0.0",
@@ -2425,8 +2306,8 @@ dependencies = [
"tomli>=2; python_version < \"3.11\"",
]
files = [
- {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"},
- {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"},
+ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"},
+ {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"},
]
[[package]]
@@ -2464,16 +2345,16 @@ files = [
[[package]]
name = "sphinx-design"
-version = "0.6.0"
+version = "0.6.1"
requires_python = ">=3.9"
summary = "A sphinx extension for designing beautiful, view size responsive web components."
groups = ["default"]
dependencies = [
- "sphinx<8,>=5",
+ "sphinx<9,>=6",
]
files = [
- {file = "sphinx_design-0.6.0-py3-none-any.whl", hash = "sha256:e9bd07eecec82eb07ff72cb50fc3624e186b04f5661270bc7b62db86c7546e95"},
- {file = "sphinx_design-0.6.0.tar.gz", hash = "sha256:ec8e3c5c59fed4049b3a5a2e209360feab31829346b5f6a0c7c342b894082192"},
+ {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"},
+ {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"},
]
[[package]]
@@ -2510,35 +2391,35 @@ files = [
[[package]]
name = "sphinxcontrib-applehelp"
-version = "1.0.8"
+version = "2.0.0"
requires_python = ">=3.9"
summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
groups = ["default"]
files = [
- {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
- {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
+ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"},
+ {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"},
]
[[package]]
name = "sphinxcontrib-devhelp"
-version = "1.0.6"
+version = "2.0.0"
requires_python = ">=3.9"
summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
groups = ["default"]
files = [
- {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
- {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
+ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"},
+ {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"},
]
[[package]]
name = "sphinxcontrib-htmlhelp"
-version = "2.0.5"
+version = "2.1.0"
requires_python = ">=3.9"
summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
groups = ["default"]
files = [
- {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
- {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
+ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"},
+ {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"},
]
[[package]]
@@ -2554,63 +2435,64 @@ files = [
[[package]]
name = "sphinxcontrib-qthelp"
-version = "1.0.7"
+version = "2.0.0"
requires_python = ">=3.9"
summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
groups = ["default"]
files = [
- {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
- {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
+ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"},
+ {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"},
]
[[package]]
name = "sphinxcontrib-serializinghtml"
-version = "1.1.10"
+version = "2.0.0"
requires_python = ">=3.9"
summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
groups = ["default"]
files = [
- {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
- {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
+ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"},
+ {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"},
]
[[package]]
name = "sqlalchemy"
-version = "2.0.31"
+version = "2.0.32"
requires_python = ">=3.7"
summary = "Database Abstraction Library"
groups = ["default"]
dependencies = [
"greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"",
+ "importlib-metadata; python_version < \"3.8\"",
"typing-extensions>=4.6.0",
]
files = [
- {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"},
- {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"},
- {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"},
+ {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"},
+ {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"},
]
[[package]]
@@ -2630,16 +2512,17 @@ files = [
[[package]]
name = "starlette"
-version = "0.37.2"
+version = "0.38.2"
requires_python = ">=3.8"
summary = "The little ASGI library that shines."
groups = ["default"]
dependencies = [
"anyio<5,>=3.4.0",
+ "typing-extensions>=3.10.0; python_version < \"3.10\"",
]
files = [
- {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
- {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
+ {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"},
+ {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"},
]
[[package]]
@@ -2665,17 +2548,6 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
-[[package]]
-name = "toolz"
-version = "0.12.1"
-requires_python = ">=3.7"
-summary = "List processing tools and functional utilities"
-groups = ["default"]
-files = [
- {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"},
- {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"},
-]
-
[[package]]
name = "tornado"
version = "6.4.1"
@@ -2698,7 +2570,7 @@ files = [
[[package]]
name = "tqdm"
-version = "4.66.4"
+version = "4.66.5"
requires_python = ">=3.7"
summary = "Fast, Extensible Progress Meter"
groups = ["default"]
@@ -2706,8 +2578,8 @@ dependencies = [
"colorama; platform_system == \"Windows\"",
]
files = [
- {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
- {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
+ {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"},
+ {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"},
]
[[package]]
@@ -2743,6 +2615,17 @@ files = [
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
+[[package]]
+name = "tzdata"
+version = "2024.1"
+requires_python = ">=2"
+summary = "Provider of IANA time zone data"
+groups = ["default"]
+files = [
+ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
+
[[package]]
name = "uri-template"
version = "1.3.0"
@@ -2767,7 +2650,7 @@ files = [
[[package]]
name = "uvicorn"
-version = "0.30.1"
+version = "0.30.5"
requires_python = ">=3.8"
summary = "The lightning-fast ASGI server."
groups = ["default"]
@@ -2777,8 +2660,8 @@ dependencies = [
"typing-extensions>=4.0; python_version < \"3.11\"",
]
files = [
- {file = "uvicorn-0.30.1-py3-none-any.whl", hash = "sha256:cd17daa7f3b9d7a24de3617820e634d0933b69eed8e33a516071174427238c81"},
- {file = "uvicorn-0.30.1.tar.gz", hash = "sha256:d46cd8e0fd80240baffbcd9ec1012a712938754afcf81bce56c024c1656aece8"},
+ {file = "uvicorn-0.30.5-py3-none-any.whl", hash = "sha256:b2d86de274726e9878188fa07576c9ceeff90a839e2b6e25c917fe05f5a6c835"},
+ {file = "uvicorn-0.30.5.tar.gz", hash = "sha256:ac6fdbd4425c5fd17a9fe39daf4d4d075da6fdc80f653e5894cdc2fd98752bee"},
]
[[package]]
@@ -2799,10 +2682,6 @@ files = [
{file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
{file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
{file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
{file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
{file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
{file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
@@ -2868,14 +2747,6 @@ files = [
{file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"},
{file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"},
{file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"},
- {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"},
- {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"},
- {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"},
- {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"},
- {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"},
- {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"},
- {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"},
- {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"},
{file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"},
]
@@ -2884,6 +2755,9 @@ name = "wcwidth"
version = "0.2.13"
summary = "Measures the displayed width of unicode strings in a terminal"
groups = ["default"]
+dependencies = [
+ "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"",
+]
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
@@ -2945,29 +2819,19 @@ files = [
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
{file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
- {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
- {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
- {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
- {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
{file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
{file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
]
[[package]]
name = "wheel"
-version = "0.43.0"
+version = "0.44.0"
requires_python = ">=3.8"
summary = "A built-package format for Python"
groups = ["default"]
files = [
- {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"},
- {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"},
+ {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"},
+ {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"},
]
[[package]]
diff --git a/docs/pyproject.toml b/docs/pyproject.toml
index 2a4b0a5..73f84a0 100644
--- a/docs/pyproject.toml
+++ b/docs/pyproject.toml
@@ -14,7 +14,6 @@ dependencies = [
"furo>=2023.8.19",
"myst-parser>=2.0.0",
"autodoc-pydantic>=2.0.1",
- "nptyping>=2.5.0",
"sphinx-autobuild>=2021.3.14",
"sphinx-design>=0.5.0",
"sphinx-togglebutton>=0.3.2",
diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock
index f11882c..bea21e9 100644
--- a/nwb_linkml/pdm.lock
+++ b/nwb_linkml/pdm.lock
@@ -2,17 +2,23 @@
# It is not intended for manual editing.
[metadata]
-groups = ["default", "dev", "tests"]
-strategy = ["cross_platform", "inherit_metadata"]
-lock_version = "4.4.2"
-content_hash = "sha256:5f3b06ea8b752b60c44c392e6079a1e0af96ad60d4442faeef88bf055ee90529"
+groups = ["default", "dev", "plot", "tests"]
+strategy = ["inherit_metadata"]
+lock_version = "4.5.0"
+content_hash = "sha256:d9e9b2a7f48f3db3e59cd58907a18aa69f91595d07eca53be0bf53b5fb2ba990"
+
+[[metadata.targets]]
+requires_python = ">=3.10,<3.13"
[[package]]
name = "annotated-types"
version = "0.7.0"
requires_python = ">=3.8"
summary = "Reusable constraint types to use with typing.Annotated"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
+dependencies = [
+ "typing-extensions>=4.0.0; python_version < \"3.9\"",
+]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -22,7 +28,10 @@ files = [
name = "antlr4-python3-runtime"
version = "4.9.3"
summary = "ANTLR 4.9.3 runtime for Python 3.7"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
+dependencies = [
+ "typing; python_version < \"3.5\"",
+]
files = [
{file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"},
]
@@ -32,7 +41,7 @@ name = "arrow"
version = "1.3.0"
requires_python = ">=3.8"
summary = "Better dates & times for Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"python-dateutil>=2.7.0",
"types-python-dateutil>=2.8.10",
@@ -44,21 +53,24 @@ files = [
[[package]]
name = "attrs"
-version = "23.2.0"
+version = "24.2.0"
requires_python = ">=3.7"
summary = "Classes Without Boilerplate"
groups = ["default", "dev", "tests"]
+dependencies = [
+ "importlib-metadata; python_version < \"3.8\"",
+]
files = [
- {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
- {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+ {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
]
[[package]]
name = "black"
-version = "24.4.2"
+version = "24.8.0"
requires_python = ">=3.8"
summary = "The uncompromising code formatter."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"click>=8.0.0",
"mypy-extensions>=0.4.3",
@@ -69,20 +81,20 @@ dependencies = [
"typing-extensions>=4.0.1; python_version < \"3.11\"",
]
files = [
- {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
- {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
- {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
- {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
- {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
- {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
- {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
- {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
- {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
- {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
- {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
- {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
- {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
- {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
+ {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
+ {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
+ {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
+ {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
+ {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
+ {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
+ {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
+ {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
+ {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
+ {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
+ {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
+ {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
+ {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
+ {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
]
[[package]]
@@ -90,7 +102,7 @@ name = "blinker"
version = "1.8.2"
requires_python = ">=3.8"
summary = "Fast, simple object-to-object and broadcast signaling"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"},
{file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
@@ -117,7 +129,7 @@ name = "certifi"
version = "2024.7.4"
requires_python = ">=3.6"
summary = "Python package for providing Mozilla's CA Bundle."
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
{file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
@@ -127,7 +139,7 @@ files = [
name = "cfgraph"
version = "0.2.1"
summary = "rdflib collections flattening graph"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"rdflib>=0.4.2",
]
@@ -140,7 +152,7 @@ name = "chardet"
version = "5.2.0"
requires_python = ">=3.7"
summary = "Universal encoding detector for Python 3"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"},
{file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
@@ -151,7 +163,7 @@ name = "charset-normalizer"
version = "3.3.2"
requires_python = ">=3.7.0"
summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
@@ -207,32 +219,22 @@ name = "click"
version = "8.1.7"
requires_python = ">=3.7"
summary = "Composable command line interface toolkit"
-groups = ["default", "dev", "tests"]
+groups = ["default", "plot"]
dependencies = [
"colorama; platform_system == \"Windows\"",
+ "importlib-metadata; python_version < \"3.8\"",
]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
-[[package]]
-name = "cloudpickle"
-version = "3.0.0"
-requires_python = ">=3.8"
-summary = "Pickler class to extend the standard pickle.Pickler functionality"
-groups = ["default", "dev", "tests"]
-files = [
- {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"},
- {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"},
-]
-
[[package]]
name = "colorama"
version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -240,83 +242,89 @@ files = [
[[package]]
name = "coverage"
-version = "6.5.0"
-requires_python = ">=3.7"
+version = "7.6.1"
+requires_python = ">=3.8"
summary = "Code coverage measurement for Python"
groups = ["dev", "tests"]
files = [
- {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
- {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
- {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
- {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
- {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
- {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
- {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
- {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
- {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
+ {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
+ {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
+ {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
+ {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
+ {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
+ {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
+ {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
+ {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
]
[[package]]
name = "coverage"
-version = "6.5.0"
+version = "7.6.1"
extras = ["toml"]
-requires_python = ">=3.7"
+requires_python = ">=3.8"
summary = "Code coverage measurement for Python"
groups = ["dev", "tests"]
dependencies = [
- "coverage==6.5.0",
+ "coverage==7.6.1",
"tomli; python_full_version <= \"3.11.0a6\"",
]
files = [
- {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
- {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
- {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
- {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
- {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
- {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
- {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
- {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
- {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
-]
-
-[[package]]
-name = "coveralls"
-version = "3.3.1"
-requires_python = ">= 3.5"
-summary = "Show coverage stats online via coveralls.io"
-groups = ["dev", "tests"]
-dependencies = [
- "coverage!=6.0.*,!=6.1,!=6.1.1,<7.0,>=4.1",
- "docopt>=0.6.1",
- "requests>=1.0.0",
-]
-files = [
- {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"},
- {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
+ {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
+ {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
+ {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
+ {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
+ {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
+ {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
+ {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
+ {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
]
[[package]]
@@ -324,7 +332,7 @@ name = "curies"
version = "0.7.10"
requires_python = ">=3.8"
summary = "Idiomatic conversion between URIs and compact URIs (CURIEs)."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"pydantic",
"pytrie",
@@ -340,7 +348,7 @@ name = "dash"
version = "2.17.1"
requires_python = ">=3.8"
summary = "A Python framework for building reactive web-apps. Developed by Plotly."
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"Flask<3.1,>=1.0.4",
"Werkzeug<3.1",
@@ -364,7 +372,7 @@ files = [
name = "dash-core-components"
version = "2.0.0"
summary = "Core component suite for Dash"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
{file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
@@ -374,7 +382,7 @@ files = [
name = "dash-cytoscape"
version = "0.3.0"
summary = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js"
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"dash",
]
@@ -387,7 +395,7 @@ files = [
name = "dash-html-components"
version = "2.0.0"
summary = "Vanilla HTML components for Dash"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
{file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
@@ -397,39 +405,18 @@ files = [
name = "dash-table"
version = "5.0.0"
summary = "Dash table"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
{file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
]

-[[package]]
-name = "dask"
-version = "2024.7.1"
-requires_python = ">=3.9"
-summary = "Parallel PyData with Task Scheduling"
-groups = ["default", "dev", "tests"]
-dependencies = [
- "click>=8.1",
- "cloudpickle>=1.5.0",
- "fsspec>=2021.09.0",
- "importlib-metadata>=4.13.0; python_version < \"3.12\"",
- "packaging>=20.0",
- "partd>=1.4.0",
- "pyyaml>=5.3.1",
- "toolz>=0.10.0",
-]
-files = [
- {file = "dask-2024.7.1-py3-none-any.whl", hash = "sha256:dd046840050376c317de90629db5c6197adda820176cf3e2df10c3219d11951f"},
- {file = "dask-2024.7.1.tar.gz", hash = "sha256:dbaef2d50efee841a9d981a218cfeb50392fc9a95e0403b6d680450e4f50d531"},
-]
-
[[package]]
name = "deprecated"
version = "1.2.14"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
summary = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"wrapt<2,>=1.10",
]
@@ -438,21 +425,12 @@ files = [
{file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
]

-[[package]]
-name = "docopt"
-version = "0.6.2"
-summary = "Pythonic argument parser, that will make you smile"
-groups = ["dev", "tests"]
-files = [
- {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
-]
-
[[package]]
name = "et-xmlfile"
version = "1.1.0"
requires_python = ">=3.6"
summary = "An implementation of lxml.xmlfile for the standard library"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
{file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
@@ -475,12 +453,13 @@ name = "flask"
version = "3.0.3"
requires_python = ">=3.8"
summary = "A simple framework for building complex web applications."
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"Jinja2>=3.1.2",
"Werkzeug>=3.0.0",
"blinker>=1.6.2",
"click>=8.1.3",
+ "importlib-metadata>=3.6.0; python_version < \"3.10\"",
"itsdangerous>=2.1.2",
]
files = [
@@ -493,51 +472,35 @@ name = "fqdn"
version = "1.5.1"
requires_python = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
summary = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
+dependencies = [
+ "cached-property>=1.3.0; python_version < \"3.8\"",
+]
files = [
{file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"},
{file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
]

-[[package]]
-name = "fsspec"
-version = "2024.6.1"
-requires_python = ">=3.8"
-summary = "File-system specification"
-groups = ["default", "dev", "tests"]
-files = [
- {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"},
- {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"},
-]
-
[[package]]
name = "future-fstrings"
version = "1.2.0"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
summary = "A backport of fstrings to python<3.6"
groups = ["dev", "tests"]
+dependencies = [
+ "tokenize-rt>=3; python_version < \"3.6\"",
+]
files = [
{file = "future_fstrings-1.2.0-py2.py3-none-any.whl", hash = "sha256:90e49598b553d8746c4dc7d9442e0359d038c3039d802c91c0a55505da318c63"},
{file = "future_fstrings-1.2.0.tar.gz", hash = "sha256:6cf41cbe97c398ab5a81168ce0dbb8ad95862d3caf23c21e4430627b90844089"},
]

-[[package]]
-name = "gprof2dot"
-version = "2024.6.6"
-requires_python = ">=3.8"
-summary = "Generate a dot graph from the output of several profilers."
-groups = ["dev", "tests"]
-files = [
- {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"},
- {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"},
-]
-
[[package]]
name = "graphviz"
version = "0.20.3"
requires_python = ">=3.8"
summary = "Simple Python interface for Graphviz"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "graphviz-0.20.3-py3-none-any.whl", hash = "sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5"},
{file = "graphviz-0.20.3.zip", hash = "sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d"},
@@ -548,8 +511,8 @@ name = "greenlet"
version = "3.0.3"
requires_python = ">=3.7"
summary = "Lightweight in-process concurrent programming"
-groups = ["default", "dev", "tests"]
-marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\""
+groups = ["default"]
+marker = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""
files = [
{file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
@@ -586,7 +549,7 @@ name = "h5py"
version = "3.11.0"
requires_python = ">=3.8"
summary = "Read and write HDF5 files from Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"numpy>=1.17.3",
]
@@ -611,7 +574,7 @@ name = "hbreader"
version = "0.9.1"
requires_python = ">=3.7"
summary = "Honey Badger reader - a generic file/url/string open and read tool"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "hbreader-0.9.1-py3-none-any.whl", hash = "sha256:9a6e76c9d1afc1b977374a5dc430a1ebb0ea0488205546d4678d6e31cc5f6801"},
{file = "hbreader-0.9.1.tar.gz", hash = "sha256:d2c132f8ba6276d794c66224c3297cec25c8079d0a4cf019c061611e0a3b94fa"},
@@ -622,7 +585,7 @@ name = "idna"
version = "3.7"
requires_python = ">=3.5"
summary = "Internationalized Domain Names in Applications (IDNA)"
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
@@ -633,8 +596,9 @@ name = "importlib-metadata"
version = "8.2.0"
requires_python = ">=3.8"
summary = "Read metadata from Python packages"
-groups = ["default", "dev", "tests"]
+groups = ["plot"]
dependencies = [
+ "typing-extensions>=3.6.4; python_version < \"3.8\"",
"zipp>=0.5",
]
files = [
@@ -657,7 +621,7 @@ files = [
name = "isodate"
version = "0.6.1"
summary = "An ISO 8601 date/time/duration parser and formatter"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"six",
]
@@ -671,7 +635,7 @@ name = "isoduration"
version = "20.11.0"
requires_python = ">=3.7"
summary = "Operations with ISO 8601 durations"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"arrow>=0.15.0",
]
@@ -685,7 +649,7 @@ name = "itsdangerous"
version = "2.2.0"
requires_python = ">=3.8"
summary = "Safely pass data to untrusted environments and back."
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
{file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
@@ -696,7 +660,7 @@ name = "jinja2"
version = "3.1.4"
requires_python = ">=3.7"
summary = "A very fast and expressive template engine."
-groups = ["default", "dev", "tests"]
+groups = ["default", "plot"]
dependencies = [
"MarkupSafe>=2.0",
]
@@ -710,7 +674,7 @@ name = "json-flattener"
version = "0.1.9"
requires_python = ">=3.7.0"
summary = "Python library for denormalizing nested dicts or json objects to tables and back"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"click",
"pyyaml",
@@ -724,7 +688,7 @@ files = [
name = "jsonasobj"
version = "1.3.1"
summary = "JSON as python objects"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "jsonasobj-1.3.1-py3-none-any.whl", hash = "sha256:b9e329dc1ceaae7cf5d5b214684a0b100e0dad0be6d5bbabac281ec35ddeca65"},
{file = "jsonasobj-1.3.1.tar.gz", hash = "sha256:d52e0544a54a08f6ea3f77fa3387271e3648655e0eace2f21e825c26370e44a2"},
@@ -735,7 +699,7 @@ name = "jsonasobj2"
version = "1.0.4"
requires_python = ">=3.6"
summary = "JSON as python objects - version 2"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"hbreader",
]
@@ -749,7 +713,7 @@ name = "jsonpatch"
version = "1.33"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
summary = "Apply JSON-Patches (RFC 6902) "
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"jsonpointer>=1.9",
]
@@ -762,7 +726,7 @@ files = [
name = "jsonpath-ng"
version = "1.6.1"
summary = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"ply",
]
@@ -776,7 +740,7 @@ name = "jsonpointer"
version = "3.0.0"
requires_python = ">=3.7"
summary = "Identify specific nodes in a JSON document (RFC 6901) "
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
{file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
@@ -787,10 +751,12 @@ name = "jsonschema"
version = "4.23.0"
requires_python = ">=3.8"
summary = "An implementation of JSON Schema validation for Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"attrs>=22.2.0",
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
"jsonschema-specifications>=2023.03.6",
+ "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"",
"referencing>=0.28.4",
"rpds-py>=0.7.1",
]
@@ -804,8 +770,9 @@ name = "jsonschema-specifications"
version = "2023.12.1"
requires_python = ">=3.8"
summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
+ "importlib-resources>=1.4.0; python_version < \"3.9\"",
"referencing>=0.31.0",
]
files = [
@@ -819,7 +786,7 @@ version = "4.23.0"
extras = ["format"]
requires_python = ">=3.8"
summary = "An implementation of JSON Schema validation for Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"fqdn",
"idna",
@@ -842,9 +809,9 @@ version = "0.0.0"
requires_python = "<4.0.0,>=3.8.1"
git = "https://github.com/sneakers-the-rat/linkml"
ref = "nwb-linkml"
-revision = "df8685eb9e99eaf9ec694db2e9cd59bab8892438"
+revision = "0a6578bff4713688260f64b3076b197bd6decce9"
summary = "Linked Open Data Modeling Language"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"antlr4-python3-runtime<4.10,==4.*,>=4.9.0",
"click>=7.0",
@@ -869,6 +836,7 @@ dependencies = [
"rdflib>=6.0.0",
"requests>=2.22",
"sqlalchemy>=1.4.31",
+ "typing-extensions>=4.4.0; python_version < \"3.9\"",
"watchdog>=0.9.0",
]
@@ -877,7 +845,7 @@ name = "linkml-dataops"
version = "0.1.0"
requires_python = ">=3.7"
summary = "LinkML Data Operations API"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"jinja2",
"jsonpatch",
@@ -895,7 +863,7 @@ name = "linkml-runtime"
version = "1.8.0"
requires_python = "<4.0,>=3.8"
summary = "Runtime environment for LinkML, the Linked open data modeling language"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"click",
"curies>=0.5.4",
@@ -916,23 +884,12 @@ files = [
{file = "linkml_runtime-1.8.0.tar.gz", hash = "sha256:436381a7bf791e9af4ef0a5adcac86762d451b77670fbdb3ba083d2c177fb5f2"},
]

-[[package]]
-name = "locket"
-version = "1.0.0"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-summary = "File-based locks for Python on Linux and Windows"
-groups = ["default", "dev", "tests"]
-files = [
- {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"},
- {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"},
-]
-
[[package]]
name = "markdown-it-py"
version = "3.0.0"
requires_python = ">=3.8"
summary = "Python port of markdown-it. Markdown parsing, done right!"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"mdurl~=0.1",
]
@@ -946,7 +903,7 @@ name = "markupsafe"
version = "2.1.5"
requires_python = ">=3.7"
summary = "Safely add untrusted strings to HTML/XML markup."
-groups = ["default", "dev", "tests"]
+groups = ["default", "plot"]
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
@@ -986,7 +943,7 @@ name = "mdurl"
version = "0.1.2"
requires_python = ">=3.7"
summary = "Markdown URL utilities"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
@@ -997,7 +954,7 @@ name = "mypy-extensions"
version = "1.0.0"
requires_python = ">=3.5"
summary = "Type system extensions for programs checked with the mypy type checker."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
@@ -1008,7 +965,7 @@ name = "nest-asyncio"
version = "1.6.0"
requires_python = ">=3.5"
summary = "Patch asyncio to allow nested event loops"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
@@ -1025,71 +982,60 @@ files = [
{file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
]

-[[package]]
-name = "nptyping"
-version = "2.5.0"
-requires_python = ">=3.7"
-summary = "Type hints for NumPy."
-groups = ["default", "dev", "tests"]
-dependencies = [
- "numpy<2.0.0,>=1.20.0; python_version >= \"3.8\"",
-]
-files = [
- {file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"},
- {file = "nptyping-2.5.0.tar.gz", hash = "sha256:e3d35b53af967e6fb407c3016ff9abae954d3a0568f7cc13a461084224e8e20a"},
-]
-
[[package]]
name = "numpy"
-version = "1.26.4"
+version = "2.0.1"
requires_python = ">=3.9"
summary = "Fundamental package for array computing in Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
- {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
- {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
- {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
- {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
- {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
- {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
- {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
- {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
- {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
- {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
- {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
- {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
- {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
- {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
- {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
- {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
- {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
- {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
- {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
- {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
- {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
- {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
- {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
- {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
- {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
- {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"},
+ {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"},
+ {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"},
+ {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"},
+ {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"},
+ {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"},
+ {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"},
+ {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"},
+ {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"},
+ {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"},
+ {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"},
+ {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"},
+ {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"},
+ {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"},
+ {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"},
+ {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"},
+ {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"},
+ {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"},
+ {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"},
+ {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"},
+ {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"},
+ {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"},
+ {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"},
]

[[package]]
name = "numpydantic"
-version = "1.2.1"
+version = "1.3.3"
requires_python = "<4.0,>=3.9"
summary = "Type and shape validation and serialization for numpy arrays in pydantic models"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
- "nptyping>=2.5.0",
"numpy>=1.24.0",
"pydantic>=2.3.0",
+ "typing-extensions>=4.11.0; python_version < \"3.11\"",
]
files = [
- {file = "numpydantic-1.2.1-py3-none-any.whl", hash = "sha256:e21d7e272410b3a2013d2a6aeec2ed6efd13ea171b0200e2029d7c2f1453def0"},
- {file = "numpydantic-1.2.1.tar.gz", hash = "sha256:d8a3e7371d78b99fa4a4733a5b873046f064993431ae63f97edcf9bda4dd5c7f"},
+ {file = "numpydantic-1.3.3-py3-none-any.whl", hash = "sha256:e002767252b1b77abb7715834ab7cbf58964baddae44863710f09e71b23287e4"},
+ {file = "numpydantic-1.3.3.tar.gz", hash = "sha256:1cc2744f7b5fbcecd51a64fafaf8c9a564bb296336a566a16be97ba7b1c28698"},
]

[[package]]
@@ -1097,7 +1043,7 @@ name = "nwb-schema-language"
version = "0.1.3"
requires_python = ">=3.9,<4.0"
summary = "Translation of the nwb-schema-language to LinkML"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"linkml-runtime<2.0.0,>=1.1.24",
"pydantic<3.0.0,>=2.3.0",
@@ -1112,7 +1058,7 @@ name = "openpyxl"
version = "3.1.5"
requires_python = ">=3.8"
summary = "A Python library to read/write Excel 2010 xlsx/xlsm files"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"et-xmlfile",
]
@@ -1126,35 +1072,59 @@ name = "packaging"
version = "24.1"
requires_python = ">=3.8"
summary = "Core utilities for Python packages"
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]

[[package]]
-name = "parse"
-version = "1.20.2"
-summary = "parse() is the opposite of format()"
-groups = ["default", "dev", "tests"]
+name = "pandas"
+version = "2.2.2"
+requires_python = ">=3.9"
+summary = "Powerful data structures for data analysis, time series, and statistics"
+groups = ["default"]
+dependencies = [
+ "numpy>=1.22.4; python_version < \"3.11\"",
+ "numpy>=1.23.2; python_version == \"3.11\"",
+ "numpy>=1.26.0; python_version >= \"3.12\"",
+ "python-dateutil>=2.8.2",
+ "pytz>=2020.1",
+ "tzdata>=2022.7",
+]
files = [
- {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"},
- {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"},
+ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
+ {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
+ {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
+ {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
+ {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
+ {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
+ {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
+ {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
+ {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
+ {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
+ {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
+ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
+ {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
+ {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
]

[[package]]
-name = "partd"
-version = "1.4.2"
-requires_python = ">=3.9"
-summary = "Appendable key-value storage"
-groups = ["default", "dev", "tests"]
-dependencies = [
- "locket",
- "toolz",
-]
+name = "parse"
+version = "1.20.2"
+summary = "parse() is the opposite of format()"
+groups = ["default"]
files = [
- {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"},
- {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"},
+ {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"},
+ {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"},
]

[[package]]
@@ -1162,7 +1132,7 @@ name = "pathspec"
version = "0.12.1"
requires_python = ">=3.8"
summary = "Utility library for gitignore style pattern matching of file paths."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@@ -1184,7 +1154,7 @@ name = "plotly"
version = "5.23.0"
requires_python = ">=3.8"
summary = "An open-source, interactive data visualization library for Python"
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"packaging",
"tenacity>=6.2.0",
@@ -1209,7 +1179,7 @@ files = [
name = "ply"
version = "3.11"
summary = "Python Lex & Yacc"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
{file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
@@ -1220,7 +1190,7 @@ name = "prefixcommons"
version = "0.1.12"
requires_python = ">=3.7,<4.0"
summary = "A python API for working with ID prefixes"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"PyYAML<7.0,>=6.0",
"click<9.0.0,>=8.1.3",
@@ -1237,7 +1207,7 @@ name = "prefixmaps"
version = "0.2.5"
requires_python = "<4.0,>=3.8"
summary = "A python library for retrieving semantic prefix maps"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"curies>=0.5.3",
"pyyaml>=5.3.1",
@@ -1252,10 +1222,11 @@ name = "pydantic"
version = "2.8.2"
requires_python = ">=3.8"
summary = "Data validation using Python type hints"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"annotated-types>=0.4.0",
"pydantic-core==2.20.1",
+ "typing-extensions>=4.12.2; python_version >= \"3.13\"",
"typing-extensions>=4.6.1; python_version < \"3.13\"",
]
files = [
@@ -1268,7 +1239,7 @@ name = "pydantic-core"
version = "2.20.1"
requires_python = ">=3.8"
summary = "Core functionality for Pydantic validation and serialization"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"typing-extensions!=4.7.0,>=4.6.0",
]
@@ -1317,30 +1288,22 @@ files = [
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
- {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
]

[[package]]
name = "pydantic-settings"
-version = "2.3.4"
+version = "2.4.0"
requires_python = ">=3.8"
summary = "Settings management using Pydantic"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"pydantic>=2.7.0",
"python-dotenv>=0.21.0",
]
files = [
- {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"},
- {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"},
+ {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"},
+ {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"},
]

[[package]]
@@ -1348,7 +1311,7 @@ name = "pygments"
version = "2.18.0"
requires_python = ">=3.8"
summary = "Pygments is a syntax highlighting package written in Python."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
{file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
@@ -1358,7 +1321,7 @@ files = [
name = "pyjsg"
version = "0.11.10"
summary = "Python JSON Schema Grammar interpreter"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"antlr4-python3-runtime~=4.9.3",
"jsonasobj>=1.2.1",
@@ -1373,7 +1336,7 @@ name = "pyparsing"
version = "3.1.2"
requires_python = ">=3.6.8"
summary = "pyparsing module - Classes and methods to define and execute parsing grammars"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
{file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
@@ -1384,7 +1347,7 @@ name = "pyshex"
version = "0.8.1"
requires_python = ">=3.6"
summary = "Python ShEx Implementation"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"cfgraph>=0.2.1",
"chardet",
@@ -1406,7 +1369,7 @@ name = "pyshexc"
version = "0.9.1"
requires_python = ">=3.7"
summary = "PyShExC - Python ShEx compiler"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"antlr4-python3-runtime~=4.9.3",
"chardet",
@@ -1422,8 +1385,8 @@

[[package]]
name = "pytest"
-version = "7.4.4"
-requires_python = ">=3.7"
+version = "8.3.2"
+requires_python = ">=3.8"
summary = "pytest: simple powerful testing with Python"
groups = ["default", "dev", "tests"]
dependencies = [
@@ -1431,12 +1394,12 @@ dependencies = [
"exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
"iniconfig",
"packaging",
- "pluggy<2.0,>=0.12",
- "tomli>=1.0.0; python_version < \"3.11\"",
+ "pluggy<2,>=1.5",
+ "tomli>=1; python_version < \"3.11\"",
]
files = [
- {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
- {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
+ {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
+ {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
]

[[package]]
@@ -1474,7 +1437,7 @@ files = [
name = "pytest-logging"
version = "2015.11.4"
summary = "Configures logging and allows tweaking the log level with a py.test flag"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"pytest>=2.8.1",
]
@@ -1482,41 +1445,12 @@ files = [
{file = "pytest-logging-2015.11.4.tar.gz", hash = "sha256:cec5c85ecf18aab7b2ead5498a31b9f758680ef5a902b9054ab3f2bdbb77c896"},
]

-[[package]]
-name = "pytest-md"
-version = "0.2.0"
-requires_python = ">=3.6"
-summary = "Plugin for generating Markdown reports for pytest results"
-groups = ["dev", "tests"]
-dependencies = [
- "pytest>=4.2.1",
-]
-files = [
- {file = "pytest-md-0.2.0.tar.gz", hash = "sha256:3b248d5b360ea5198e05b4f49c7442234812809a63137ec6cdd3643a40cf0112"},
- {file = "pytest_md-0.2.0-py3-none-any.whl", hash = "sha256:4c4cd16fea6d1485e87ee254558712c804a96d2aa9674b780e7eb8fb6526e1d1"},
-]
-
-[[package]]
-name = "pytest-profiling"
-version = "1.7.0"
-summary = "Profiling plugin for py.test"
-groups = ["dev", "tests"]
-dependencies = [
- "gprof2dot",
- "pytest",
- "six",
-]
-files = [
- {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"},
- {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"},
-]
-
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
summary = "Extensions to the standard Python datetime module"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"six>=1.5",
]
@@ -1530,7 +1464,7 @@ name = "python-dotenv"
version = "1.0.1"
requires_python = ">=3.8"
summary = "Read key-value pairs from a .env file and set them as environment variables"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@@ -1540,7 +1474,7 @@ files = [
name = "pytrie"
version = "0.4.0"
summary = "A pure Python implementation of the trie data structure."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"sortedcontainers",
]
@@ -1550,36 +1484,50 @@ files = [
]

[[package]]
-name = "pyyaml"
-version = "6.0.1"
-requires_python = ">=3.6"
-summary = "YAML parser and emitter for Python"
-groups = ["default", "dev", "tests"]
+name = "pytz"
+version = "2024.1"
+summary = "World timezone definitions, modern and historical"
+groups = ["default"]
files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+requires_python = ">=3.8"
+summary = "YAML parser and emitter for Python"
+groups = ["default"]
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]

[[package]]
@@ -1587,7 +1535,7 @@ name = "rdflib"
version = "7.0.0"
requires_python = ">=3.8.1,<4.0.0"
summary = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"isodate<0.7.0,>=0.6.0",
"pyparsing<4,>=2.1.0",
@@ -1601,7 +1549,7 @@ files = [
name = "rdflib-jsonld"
version = "0.6.1"
summary = "rdflib extension adding JSON-LD parser and serializer"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"rdflib>=5.0.0",
]
@@ -1615,7 +1563,7 @@ name = "rdflib-shim"
version = "1.0.3"
requires_python = ">=3.7"
summary = "Shim for rdflib 5 and 6 incompatibilities"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"rdflib-jsonld==0.6.1",
"rdflib>=5.0.0",
@@ -1630,7 +1578,7 @@ name = "referencing"
version = "0.35.1"
requires_python = ">=3.8"
summary = "JSON Referencing + Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"attrs>=22.2.0",
"rpds-py>=0.7.0",
@@ -1645,7 +1593,7 @@ name = "requests"
version = "2.32.3"
requires_python = ">=3.8"
summary = "Python HTTP for Humans."
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
dependencies = [
"certifi>=2017.4.17",
"charset-normalizer<4,>=2",
@@ -1680,7 +1628,7 @@ files = [
name = "retrying"
version = "1.3.4"
summary = "Retrying"
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"six>=1.7.0",
]
@@ -1694,7 +1642,7 @@ name = "rfc3339-validator"
version = "0.1.4"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
summary = "A pure python RFC3339 validator"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"six",
]
@@ -1707,7 +1655,7 @@ files = [
name = "rfc3987"
version = "1.3.8"
summary = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "rfc3987-1.3.8-py2.py3-none-any.whl", hash = "sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53"},
{file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"},
@@ -1718,10 +1666,11 @@ name = "rich"
version = "13.7.1"
requires_python = ">=3.7.0"
summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"markdown-it-py>=2.2.0",
"pygments<3.0.0,>=2.13.0",
+ "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"",
]
files = [
{file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
@@ -1730,75 +1679,63 @@ files = [
[[package]]
name = "rpds-py"
-version = "0.19.1"
+version = "0.20.0"
requires_python = ">=3.8"
summary = "Python bindings to Rust's persistent data structures (rpds)"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
- {file = "rpds_py-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aaf71f95b21f9dc708123335df22e5a2fef6307e3e6f9ed773b2e0938cc4d491"},
- {file = "rpds_py-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca0dda0c5715efe2ab35bb83f813f681ebcd2840d8b1b92bfc6fe3ab382fae4a"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81db2e7282cc0487f500d4db203edc57da81acde9e35f061d69ed983228ffe3b"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a8dfa125b60ec00c7c9baef945bb04abf8ac772d8ebefd79dae2a5f316d7850"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271accf41b02687cef26367c775ab220372ee0f4925591c6796e7c148c50cab5"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9bc4161bd3b970cd6a6fcda70583ad4afd10f2750609fb1f3ca9505050d4ef3"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0cf2a0dbb5987da4bd92a7ca727eadb225581dd9681365beba9accbe5308f7d"},
- {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5e28e56143750808c1c79c70a16519e9bc0a68b623197b96292b21b62d6055c"},
- {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c7af6f7b80f687b33a4cdb0a785a5d4de1fb027a44c9a049d8eb67d5bfe8a687"},
- {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e429fc517a1c5e2a70d576077231538a98d59a45dfc552d1ac45a132844e6dfb"},
- {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d2dbd8f4990d4788cb122f63bf000357533f34860d269c1a8e90ae362090ff3a"},
- {file = "rpds_py-0.19.1-cp310-none-win32.whl", hash = "sha256:e0f9d268b19e8f61bf42a1da48276bcd05f7ab5560311f541d22557f8227b866"},
- {file = "rpds_py-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:df7c841813f6265e636fe548a49664c77af31ddfa0085515326342a751a6ba51"},
- {file = "rpds_py-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:902cf4739458852fe917104365ec0efbea7d29a15e4276c96a8d33e6ed8ec137"},
- {file = "rpds_py-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3d73022990ab0c8b172cce57c69fd9a89c24fd473a5e79cbce92df87e3d9c48"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3837c63dd6918a24de6c526277910e3766d8c2b1627c500b155f3eecad8fad65"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cdb7eb3cf3deb3dd9e7b8749323b5d970052711f9e1e9f36364163627f96da58"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26ab43b6d65d25b1a333c8d1b1c2f8399385ff683a35ab5e274ba7b8bb7dc61c"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75130df05aae7a7ac171b3b5b24714cffeabd054ad2ebc18870b3aa4526eba23"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c34f751bf67cab69638564eee34023909380ba3e0d8ee7f6fe473079bf93f09b"},
- {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2671cb47e50a97f419a02cd1e0c339b31de017b033186358db92f4d8e2e17d8"},
- {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c73254c256081704dba0a333457e2fb815364018788f9b501efe7c5e0ada401"},
- {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4383beb4a29935b8fa28aca8fa84c956bf545cb0c46307b091b8d312a9150e6a"},
- {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dbceedcf4a9329cc665452db1aaf0845b85c666e4885b92ee0cddb1dbf7e052a"},
- {file = "rpds_py-0.19.1-cp311-none-win32.whl", hash = "sha256:f0a6d4a93d2a05daec7cb885157c97bbb0be4da739d6f9dfb02e101eb40921cd"},
- {file = "rpds_py-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:c149a652aeac4902ecff2dd93c3b2681c608bd5208c793c4a99404b3e1afc87c"},
- {file = "rpds_py-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:56313be667a837ff1ea3508cebb1ef6681d418fa2913a0635386cf29cff35165"},
- {file = "rpds_py-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d1d7539043b2b31307f2c6c72957a97c839a88b2629a348ebabe5aa8b626d6b"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1dc59a5e7bc7f44bd0c048681f5e05356e479c50be4f2c1a7089103f1621d5"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8f78398e67a7227aefa95f876481485403eb974b29e9dc38b307bb6eb2315ea"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef07a0a1d254eeb16455d839cef6e8c2ed127f47f014bbda64a58b5482b6c836"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8124101e92c56827bebef084ff106e8ea11c743256149a95b9fd860d3a4f331f"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08ce9c95a0b093b7aec75676b356a27879901488abc27e9d029273d280438505"},
- {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b02dd77a2de6e49078c8937aadabe933ceac04b41c5dde5eca13a69f3cf144e"},
- {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4dd02e29c8cbed21a1875330b07246b71121a1c08e29f0ee3db5b4cfe16980c4"},
- {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9c7042488165f7251dc7894cd533a875d2875af6d3b0e09eda9c4b334627ad1c"},
- {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f809a17cc78bd331e137caa25262b507225854073fd319e987bd216bed911b7c"},
- {file = "rpds_py-0.19.1-cp312-none-win32.whl", hash = "sha256:3ddab996807c6b4227967fe1587febade4e48ac47bb0e2d3e7858bc621b1cace"},
- {file = "rpds_py-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:32e0db3d6e4f45601b58e4ac75c6f24afbf99818c647cc2066f3e4b192dabb1f"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d5c7e32f3ee42f77d8ff1a10384b5cdcc2d37035e2e3320ded909aa192d32c3"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:89cc8921a4a5028d6dd388c399fcd2eef232e7040345af3d5b16c04b91cf3c7e"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca34e913d27401bda2a6f390d0614049f5a95b3b11cd8eff80fe4ec340a1208"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5953391af1405f968eb5701ebbb577ebc5ced8d0041406f9052638bafe52209d"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:840e18c38098221ea6201f091fc5d4de6128961d2930fbbc96806fb43f69aec1"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d8b735c4d162dc7d86a9cf3d717f14b6c73637a1f9cd57fe7e61002d9cb1972"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce757c7c90d35719b38fa3d4ca55654a76a40716ee299b0865f2de21c146801c"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9421b23c85f361a133aa7c5e8ec757668f70343f4ed8fdb5a4a14abd5437244"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3b823be829407393d84ee56dc849dbe3b31b6a326f388e171555b262e8456cc1"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:5e58b61dcbb483a442c6239c3836696b79f2cd8e7eec11e12155d3f6f2d886d1"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39d67896f7235b2c886fb1ee77b1491b77049dcef6fbf0f401e7b4cbed86bbd4"},
- {file = "rpds_py-0.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b32cd4ab6db50c875001ba4f5a6b30c0f42151aa1fbf9c2e7e3674893fb1dc4"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c32e41de995f39b6b315d66c27dea3ef7f7c937c06caab4c6a79a5e09e2c415"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a129c02b42d46758c87faeea21a9f574e1c858b9f358b6dd0bbd71d17713175"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:346557f5b1d8fd9966059b7a748fd79ac59f5752cd0e9498d6a40e3ac1c1875f"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31e450840f2f27699d014cfc8865cc747184286b26d945bcea6042bb6aa4d26e"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01227f8b3e6c8961490d869aa65c99653df80d2f0a7fde8c64ebddab2b9b02fd"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69084fd29bfeff14816666c93a466e85414fe6b7d236cfc108a9c11afa6f7301"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d2b88efe65544a7d5121b0c3b003ebba92bfede2ea3577ce548b69c5235185"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ea961a674172ed2235d990d7edf85d15d8dfa23ab8575e48306371c070cda67"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5beffdbe766cfe4fb04f30644d822a1080b5359df7db3a63d30fa928375b2720"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:720f3108fb1bfa32e51db58b832898372eb5891e8472a8093008010911e324c5"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c2087dbb76a87ec2c619253e021e4fb20d1a72580feeaa6892b0b3d955175a71"},
- {file = "rpds_py-0.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ddd50f18ebc05ec29a0d9271e9dbe93997536da3546677f8ca00b76d477680c"},
- {file = "rpds_py-0.19.1.tar.gz", hash = "sha256:31dd5794837f00b46f4096aa8ccaa5972f73a938982e32ed817bb520c465e520"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
+ {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
+ {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
+ {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
+ {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
+ {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
+ {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
+ {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
]
[[package]]
@@ -1806,7 +1743,7 @@ name = "ruamel-yaml"
version = "0.18.6"
requires_python = ">=3.7"
summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"",
]
@@ -1820,8 +1757,8 @@ name = "ruamel-yaml-clib"
version = "0.2.8"
requires_python = ">=3.6"
summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
-groups = ["default", "dev", "tests"]
-marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""
+groups = ["default"]
+marker = "platform_python_implementation == \"CPython\""
files = [
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
@@ -1852,47 +1789,47 @@ files = [
[[package]]
name = "ruff"
-version = "0.5.4"
+version = "0.5.7"
requires_python = ">=3.7"
summary = "An extremely fast Python linter and code formatter, written in Rust."
groups = ["dev"]
files = [
- {file = "ruff-0.5.4-py3-none-linux_armv6l.whl", hash = "sha256:82acef724fc639699b4d3177ed5cc14c2a5aacd92edd578a9e846d5b5ec18ddf"},
- {file = "ruff-0.5.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:da62e87637c8838b325e65beee485f71eb36202ce8e3cdbc24b9fcb8b99a37be"},
- {file = "ruff-0.5.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e98ad088edfe2f3b85a925ee96da652028f093d6b9b56b76fc242d8abb8e2059"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c55efbecc3152d614cfe6c2247a3054cfe358cefbf794f8c79c8575456efe19"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9b85eaa1f653abd0a70603b8b7008d9e00c9fa1bbd0bf40dad3f0c0bdd06793"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf497a47751be8c883059c4613ba2f50dd06ec672692de2811f039432875278"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:09c14ed6a72af9ccc8d2e313d7acf7037f0faff43cde4b507e66f14e812e37f7"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:628f6b8f97b8bad2490240aa84f3e68f390e13fabc9af5c0d3b96b485921cd60"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3520a00c0563d7a7a7c324ad7e2cde2355733dafa9592c671fb2e9e3cd8194c1"},
- {file = "ruff-0.5.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93789f14ca2244fb91ed481456f6d0bb8af1f75a330e133b67d08f06ad85b516"},
- {file = "ruff-0.5.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:029454e2824eafa25b9df46882f7f7844d36fd8ce51c1b7f6d97e2615a57bbcc"},
- {file = "ruff-0.5.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9492320eed573a13a0bc09a2957f17aa733fff9ce5bf00e66e6d4a88ec33813f"},
- {file = "ruff-0.5.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6e1f62a92c645e2919b65c02e79d1f61e78a58eddaebca6c23659e7c7cb4ac7"},
- {file = "ruff-0.5.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:768fa9208df2bec4b2ce61dbc7c2ddd6b1be9fb48f1f8d3b78b3332c7d71c1ff"},
- {file = "ruff-0.5.4-py3-none-win32.whl", hash = "sha256:e1e7393e9c56128e870b233c82ceb42164966f25b30f68acbb24ed69ce9c3a4e"},
- {file = "ruff-0.5.4-py3-none-win_amd64.whl", hash = "sha256:58b54459221fd3f661a7329f177f091eb35cf7a603f01d9eb3eb11cc348d38c4"},
- {file = "ruff-0.5.4-py3-none-win_arm64.whl", hash = "sha256:bd53da65f1085fb5b307c38fd3c0829e76acf7b2a912d8d79cadcdb4875c1eb7"},
- {file = "ruff-0.5.4.tar.gz", hash = "sha256:2795726d5f71c4f4e70653273d1c23a8182f07dd8e48c12de5d867bfb7557eed"},
+ {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"},
+ {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"},
+ {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"},
+ {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"},
+ {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"},
+ {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"},
+ {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"},
+ {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"},
+ {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"},
]
[[package]]
name = "setuptools"
-version = "71.1.0"
+version = "72.2.0"
requires_python = ">=3.8"
summary = "Easily download, build, install, upgrade, and uninstall Python packages"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
- {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"},
- {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"},
+ {file = "setuptools-72.2.0-py3-none-any.whl", hash = "sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4"},
+ {file = "setuptools-72.2.0.tar.gz", hash = "sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9"},
]
[[package]]
name = "shexjsg"
version = "0.8.2"
summary = "ShExJSG - Astract Syntax Tree for the ShEx 2.0 language"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"pyjsg>=0.11.10",
]
@@ -1906,7 +1843,7 @@ name = "six"
version = "1.16.0"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
summary = "Python 2 and 3 compatibility utilities"
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -1916,7 +1853,7 @@ files = [
name = "sortedcontainers"
version = "2.4.0"
summary = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
@@ -1927,7 +1864,7 @@ name = "sparqlslurper"
version = "0.5.1"
requires_python = ">=3.7.4"
summary = "SPARQL Slurper for rdflib"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"rdflib-shim",
"rdflib>=5.0.0",
@@ -1943,7 +1880,7 @@ name = "sparqlwrapper"
version = "2.0.0"
requires_python = ">=3.7"
summary = "SPARQL Endpoint interface to Python"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"rdflib>=6.1.1",
]
@@ -1954,63 +1891,64 @@ files = [
[[package]]
name = "sqlalchemy"
-version = "2.0.31"
+version = "2.0.32"
requires_python = ">=3.7"
summary = "Database Abstraction Library"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"",
+ "importlib-metadata; python_version < \"3.8\"",
"typing-extensions>=4.6.0",
]
files = [
- {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"},
- {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"},
- {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"},
- {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"},
- {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"},
- {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"},
+ {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"},
+ {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"},
+ {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"},
+ {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"},
+ {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"},
]
[[package]]
name = "sybil"
-version = "5.0.3"
+version = "6.1.1"
requires_python = ">=3.7"
summary = "Automated testing for the examples in your code and documentation."
groups = ["dev", "tests"]
files = [
- {file = "sybil-5.0.3-py3-none-any.whl", hash = "sha256:6f3c30822169895c4fb34c8366bdb132cf62bb68fb1d03d2ebb05282eab08c95"},
- {file = "sybil-5.0.3.tar.gz", hash = "sha256:20dfe3a35a8d1ffcb4311434d1abf38c030c91064d75ff6b56ddd1060e08e758"},
+ {file = "sybil-6.1.1-py3-none-any.whl", hash = "sha256:04ae5e17997bc5166ba3da0d6244767c397e129f399a1aa23c89b30a704fec2c"},
+ {file = "sybil-6.1.1.tar.gz", hash = "sha256:8fb4f2c3582d1fe6705d1ae3a31f93fec7619f634940a8fcbf5d1b7d18183917"},
]
[[package]]
name = "tenacity"
-version = "8.5.0"
+version = "9.0.0"
requires_python = ">=3.8"
summary = "Retry code until it succeeds"
-groups = ["dev", "tests"]
+groups = ["plot"]
files = [
- {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"},
- {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"},
+ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+ {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
]
[[package]]
@@ -2025,29 +1963,18 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
-[[package]]
-name = "toolz"
-version = "0.12.1"
-requires_python = ">=3.7"
-summary = "List processing tools and functional utilities"
-groups = ["default", "dev", "tests"]
-files = [
- {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"},
- {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"},
-]
-
[[package]]
name = "tqdm"
-version = "4.66.4"
+version = "4.66.5"
requires_python = ">=3.7"
summary = "Fast, Extensible Progress Meter"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
dependencies = [
"colorama; platform_system == \"Windows\"",
]
files = [
- {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
- {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
+ {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"},
+ {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"},
]
[[package]]
@@ -2055,7 +1982,7 @@ name = "types-python-dateutil"
version = "2.9.0.20240316"
requires_python = ">=3.8"
summary = "Typing stubs for python-dateutil"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"},
{file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"},
@@ -2066,18 +1993,29 @@ name = "typing-extensions"
version = "4.12.2"
requires_python = ">=3.8"
summary = "Backported and Experimental Type Hints for Python 3.8+"
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
+[[package]]
+name = "tzdata"
+version = "2024.1"
+requires_python = ">=2"
+summary = "Provider of IANA time zone data"
+groups = ["default"]
+files = [
+ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
+
[[package]]
name = "uri-template"
version = "1.3.0"
requires_python = ">=3.7"
summary = "RFC 6570 URI Template Processor"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"},
{file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"},
@@ -2102,7 +2040,7 @@ name = "urllib3"
version = "2.2.2"
requires_python = ">=3.8"
summary = "HTTP library with thread-safe connection pooling, file post, and more."
-groups = ["default", "dev", "tests"]
+groups = ["default", "dev", "plot", "tests"]
files = [
{file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
{file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
@@ -2110,48 +2048,44 @@ files = [
[[package]]
name = "watchdog"
-version = "4.0.1"
+version = "4.0.2"
requires_python = ">=3.8"
summary = "Filesystem events monitoring"
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
- {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"},
- {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"},
- {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
- {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
- {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"},
- {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"},
- {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"},
- {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"},
- {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"},
+ {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"},
+ {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"},
+ {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"},
+ {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"},
+ {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"},
+ {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"},
+ {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"},
+ {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"},
+ {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"},
+ {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"},
+ {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"},
+ {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"},
+ {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"},
+ {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"},
+ {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"},
+ {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"},
]
[[package]]
name = "webcolors"
-version = "24.6.0"
+version = "24.8.0"
requires_python = ">=3.8"
summary = "A library for working with the color formats defined by HTML and CSS."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
- {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"},
- {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"},
+ {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"},
+ {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"},
]
[[package]]
@@ -2159,7 +2093,7 @@ name = "werkzeug"
version = "3.0.3"
requires_python = ">=3.8"
summary = "The comprehensive WSGI web application library."
-groups = ["dev", "tests"]
+groups = ["plot"]
dependencies = [
"MarkupSafe>=2.1.1",
]
@@ -2173,7 +2107,7 @@ name = "wrapt"
version = "1.16.0"
requires_python = ">=3.6"
summary = "Module for decorators, wrappers and monkey patching."
-groups = ["default", "dev", "tests"]
+groups = ["default"]
files = [
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
@@ -2211,11 +2145,11 @@ files = [
[[package]]
name = "zipp"
-version = "3.19.2"
+version = "3.20.0"
requires_python = ">=3.8"
summary = "Backport of pathlib-compatible object wrapper for zip files"
-groups = ["default", "dev", "tests"]
+groups = ["plot"]
files = [
- {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
- {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
+ {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"},
+ {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"},
]
diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml
index d24847a..f603a55 100644
--- a/nwb_linkml/pyproject.toml
+++ b/nwb_linkml/pyproject.toml
@@ -15,15 +15,14 @@ dependencies = [
"rich>=13.5.2",
#"linkml>=1.7.10",
"linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml",
- "nptyping>=2.5.0",
"pydantic>=2.3.0",
"h5py>=3.9.0",
"pydantic-settings>=2.0.3",
- "dask>=2023.9.2",
"tqdm>=4.66.1",
'typing-extensions>=4.12.2;python_version<"3.11"',
- "numpydantic>=1.2.1",
+ "numpydantic>=1.3.3",
"black>=24.4.2",
+ "pandas>=2.2.2",
]
[project.urls]
@@ -37,21 +36,16 @@ plot = [
"dash-cytoscape<1.0.0,>=0.3.0",
]
tests = [
- "nwb-linkml[plot]",
- "pytest<8.0.0,>=7.4.0",
+ "nwb-linkml",
+ "pytest>=8.0.0",
"pytest-depends<2.0.0,>=1.0.1",
- "coverage<7.0.0,>=6.1.1",
- "pytest-md<1.0.0,>=0.2.0",
"pytest-cov<5.0.0,>=4.1.0",
- "coveralls<4.0.0,>=3.3.1",
- "pytest-profiling<2.0.0,>=1.7.0",
- "sybil<6.0.0,>=5.0.3",
+ "sybil>=6.0.3",
"requests-cache>=1.2.1",
]
dev = [
"nwb-linkml[tests]",
"ruff>=0.5.0",
- "black>=24.4.2",
]
[tool.pdm]
@@ -75,7 +69,9 @@ addopts = [
]
markers = [
"dev: tests that are just for development rather than testing correctness",
- "provider: tests for providers!"
+ "provider: tests for providers!",
+ "linkml: tests related to linkml generation",
+ "pydantic: tests related to pydantic generation"
]
testpaths = [
"src/nwb_linkml",
diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
index 561df65..13e86fd 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
@@ -5,16 +5,8 @@ Base class for adapters
import sys
from abc import abstractmethod
from dataclasses import dataclass, field
-from typing import (
- Any,
- Generator,
- List,
- Optional,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
+from logging import Logger
+from typing import Any, Generator, List, Literal, Optional, Tuple, Type, TypeVar, Union, overload
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import (
@@ -26,7 +18,8 @@ from linkml_runtime.linkml_model import (
)
from pydantic import BaseModel
-from nwb_schema_language import Attribute, Dataset, Group, Schema
+from nwb_linkml.logging import init_logger
+from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema
if sys.version_info.minor >= 11:
from typing import TypeVarTuple, Unpack
@@ -107,6 +100,15 @@ class BuildResult:
class Adapter(BaseModel):
"""Abstract base class for adapters"""
+ _logger: Optional[Logger] = None
+
+ @property
+ def logger(self) -> Logger:
+ """A logger with the name of the adapter class! See :class:`.config`"""
+ if self._logger is None:
+ self._logger = init_logger(self.__class__.__name__)
+ return self._logger
+
@abstractmethod
def build(self) -> "BuildResult":
"""
@@ -196,6 +198,14 @@ class Adapter(BaseModel):
if isinstance(item, tuple) and item[0] in field and item[1] is not None:
yield item[1]
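+ # overload to narrow the return type: only Group and Dataset models carry
+ # ``neurodata_type_def``, so walking that field can only yield those types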
+ @overload
+ def walk_field_values(
+ self,
+ input: Union[BaseModel, dict, list],
+ field: Literal["neurodata_type_def"],
+ value: Optional[Any] = None,
+ ) -> Generator[Group | Dataset, None, None]: ...
+
def walk_field_values(
self, input: Union[BaseModel, dict, list], field: str, value: Optional[Any] = None
) -> Generator[BaseModel, None, None]:
@@ -238,3 +248,43 @@ class Adapter(BaseModel):
for item in self.walk(input):
if any([type(item) is atype for atype in get_type]):
yield item
+
+
+def is_1d(cls: Dataset | Attribute) -> bool:
+ """
+ Check if the values of a dataset are 1-dimensional.
+
+ Specifically:
+ * a single-layer dim/shape list of length 1, or
+ * a nested dim/shape list containing a single spec, itself of length 1
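+
+ e.g. ``dims: [x]`` and ``dims: [[x]]`` are 1-D,
+ while ``dims: [x, y]`` and ``dims: [[x], [x, y]]`` are not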
+ """
+ if cls.dims is None:
+ return False
+
+ return (
+ not any([isinstance(dim, list) for dim in cls.dims]) and len(cls.dims) == 1
+ ) or ( # nested list
+ all([isinstance(dim, list) for dim in cls.dims])
+ and len(cls.dims) == 1
+ and len(cls.dims[0]) == 1
+ )
+
+
+def is_compound(cls: Dataset) -> bool:
+ """Check if dataset has a compound dtype"""
+ return (
+ isinstance(cls.dtype, list)
+ and len(cls.dtype) > 0
+ and isinstance(cls.dtype[0], CompoundDtype)
+ )
+
+
+def has_attrs(cls: Dataset) -> bool:
+ """
+ Check that a dataset has at least one attribute, none of which carry a fixed ``value``
+ """
+ return (
+ cls.attributes is not None
+ and len(cls.attributes) > 0
+ and all([not a.value for a in cls.attributes])
+ )
diff --git a/nwb_linkml/src/nwb_linkml/adapters/attribute.py b/nwb_linkml/src/nwb_linkml/adapters/attribute.py
new file mode 100644
index 0000000..ddf6edb
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/adapters/attribute.py
@@ -0,0 +1,197 @@
+"""
+Adapters for attribute types
+"""
+
+from abc import abstractmethod
+from typing import ClassVar, Optional, Type, TypedDict
+
+from linkml_runtime.linkml_model.meta import SlotDefinition
+
+from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d
+from nwb_linkml.adapters.array import ArrayAdapter
+from nwb_linkml.maps import Map
+from nwb_linkml.maps.dtype import handle_dtype
+from nwb_schema_language import Attribute
+
+
+def _make_ifabsent(val: str | int | float | None) -> str | None:
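+ """Wrap a default value in linkml ``ifabsent`` syntax, e.g. ``string(x)``, ``integer(1)``, ``float(1.0)``"""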
+ if val is None:
+ return None
+ elif isinstance(val, str):
+ return f"string({val})"
+ elif isinstance(val, int):
+ return f"integer({val})"
+ elif isinstance(val, float):
+ return f"float({val})"
+ else:
+ return str(val)
+
+
+class AttrDefaults(TypedDict):
+ """Default fields for an attribute"""
+
+ equals_string: str | None
+ equals_number: float | int | None
+ ifabsent: str | None
+
+
+class AttributeMap(Map):
+ """Base class for attribute mapping transformations :)"""
+
+ @classmethod
+ def handle_defaults(cls, attr: Attribute) -> AttrDefaults:
+ """
+ Construct arguments for linkml slot default metaslots from nwb schema lang attribute props
+ """
+ equals_string = None
+ equals_number = None
+ default_value = None
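+ # a fixed ``value`` takes precedence over ``default_value`` for the ifabsent default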
+ if attr.value:
+ if isinstance(attr.value, (int, float)):
+ equals_number = attr.value
+ else:
+ equals_string = str(attr.value)
+
+ if equals_number:
+ default_value = _make_ifabsent(equals_number)
+ elif equals_string:
+ default_value = _make_ifabsent(equals_string)
+ elif attr.default_value:
+ default_value = _make_ifabsent(attr.default_value)
+
+ return AttrDefaults(
+ equals_string=equals_string, equals_number=equals_number, ifabsent=default_value
+ )
+
+ @classmethod
+ @abstractmethod
+ def check(cls, attr: Attribute) -> bool:
+ """
+ Check if this map applies
+ """
+ pass # pragma: no cover
+
+ @classmethod
+ @abstractmethod
+ def apply(
+ cls, attr: Attribute, res: Optional[BuildResult] = None, name: Optional[str] = None
+ ) -> BuildResult:
+ """
+ Apply this mapping
+ """
+ pass # pragma: no cover
+
+
+class MapScalar(AttributeMap):
+ """
+ Map a simple scalar value
+ """
+
+ @classmethod
+ def check(cls, attr: Attribute) -> bool:
+ """
+ Check if we are a scalar value!
+ """
+ return not attr.dims and not attr.shape
+
+ @classmethod
+ def apply(cls, attr: Attribute, res: Optional[BuildResult] = None) -> BuildResult:
+ """
+ Make a slot for us!
+ """
+ slot = SlotDefinition(
+ name=attr.name,
+ range=handle_dtype(attr.dtype),
+ description=attr.doc,
+ required=attr.required,
+ **cls.handle_defaults(attr),
+ )
+ return BuildResult(slots=[slot])
+
+
+class MapArray(AttributeMap):
+ """
+ Map an array value!
+ """
+
+ @classmethod
+ def check(cls, attr: Attribute) -> bool:
+ """
+ Check that we have some array specification!
+ """
+ return bool(attr.dims or attr.shape)
+
+ @classmethod
+ def apply(cls, attr: Attribute, res: Optional[BuildResult] = None) -> BuildResult:
+ """
+ Make a slot with an array expression!
+
+ If we're just a 1D array, use a list (set multivalued: true).
+ If more than that, make an array descriptor
+ """
+ expressions = {}
+ multivalued = False
+ if is_1d(attr):
+ multivalued = True
+ else:
+ # ---------------------------------
+ # SPECIAL CASE: Some old versions of HDMF don't have ``dims``, only shape
+ # ---------------------------------
+ shape = attr.shape
+ dims = attr.dims
+ if shape and not dims:
+ dims = ["null"] * len(shape)
+
+ array_adapter = ArrayAdapter(dims, shape)
+ expressions = array_adapter.make_slot()
+
+ slot = SlotDefinition(
+ name=attr.name,
+ range=handle_dtype(attr.dtype),
+ multivalued=multivalued,
+ description=attr.doc,
+ required=attr.required,
+ **expressions,
+ **cls.handle_defaults(attr),
+ )
+ return BuildResult(slots=[slot])
+
+
+class AttributeAdapter(Adapter):
+ """
+ Create slot definitions from nwb schema language attributes
+ """
+
+ TYPE: ClassVar[Type] = Attribute
+
+ cls: Attribute
+
+ def build(self) -> "BuildResult":
+ """
+ Build the slot definitions, every attribute should have a map.
+ """
+ map = self.match()
+ return map.apply(self.cls)
+
+ def match(self) -> Optional[Type[AttributeMap]]:
+ """
+ Find the map class that applies to this attribute
+
+ Returns:
+ the :class:`.AttributeMap` subclass that applies, or ``None`` if none match
+
+ Raises:
+ RuntimeError - if more than one map matches
+ """
+ # find a map to use
+ matches = [m for m in AttributeMap.__subclasses__() if m.check(self.cls)]
+
+ if len(matches) > 1: # pragma: no cover
+ raise RuntimeError(
+ "Only one map should apply to a dataset, you need to refactor the maps! Got maps:"
+ f" {matches}"
+ )
+ elif len(matches) == 0:
+ return None
+ else:
+ return matches[0]
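+
+
+# A usage sketch (hypothetical attribute; assumes the scalar map applies):
+#
+#   attr = Attribute(name="unit", doc="unit of measurement", value="seconds")
+#   result = AttributeAdapter(cls=attr).build()
+#   result.slots[0].equals_string  # -> "seconds"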
diff --git a/nwb_linkml/src/nwb_linkml/adapters/classes.py b/nwb_linkml/src/nwb_linkml/adapters/classes.py
index c700d53..0097e47 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/classes.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/classes.py
@@ -9,9 +9,10 @@ from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from pydantic import field_validator
from nwb_linkml.adapters.adapter import Adapter, BuildResult
+from nwb_linkml.adapters.attribute import AttributeAdapter
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
-from nwb_schema_language import CompoundDtype, Dataset, DTypeType, FlatDtype, Group, ReferenceDtype
+from nwb_schema_language import Dataset, Group
T = TypeVar("T", bound=Type[Dataset] | Type[Group])
TI = TypeVar("TI", bound=Dataset | Group)
@@ -118,22 +119,35 @@ class ClassAdapter(Adapter):
Returns:
list[:class:`.SlotDefinition`]
"""
- attrs = [
- SlotDefinition(
- name=attr.name,
- description=attr.doc,
- range=self.handle_dtype(attr.dtype),
- )
- for attr in cls.attributes
- ]
-
- return attrs
+ if cls.attributes is not None:
+ results = [AttributeAdapter(cls=attr).build() for attr in cls.attributes]
+ slots = [r.slots[0] for r in results]
+ return slots
+ else:
+ return []
def _get_full_name(self) -> str:
"""The full name of the object in the generated linkml
Distinct from 'name' which is the thing that's used to define position in
- a hierarchical data setting
+ a hierarchical data setting.
+
+ Combines names from ``parent``, if present, using ``"__"``.
+ Rather than concatenating the full series of names with ``__`` like
+
+ * ``Parent``
+ * ``Parent__child1``
+ * ``Parent__child1__child2``
+
+ we only keep the last parent, so
+
+ * ``Parent``
+ * ``Parent__child1``
+ * ``child1__child2``
+
+ The assumption is that a child name may not be unique, but the combination of
+ a parent/child pair should be unique enough to avoid name shadowing without
+ making humongous and cumbersome names.
"""
if self.cls.neurodata_type_def:
name = self.cls.neurodata_type_def
@@ -141,7 +155,8 @@ class ClassAdapter(Adapter):
# not necessarily a unique name, so we combine parent names
name_parts = []
if self.parent is not None:
- name_parts.append(self.parent._get_full_name())
+ parent_name = self.parent._get_full_name().split("__")[-1]
+ name_parts.append(parent_name)
name_parts.append(self.cls.name)
name = "__".join(name_parts)
@@ -187,37 +202,6 @@ class ClassAdapter(Adapter):
return name
- @classmethod
- def handle_dtype(cls, dtype: DTypeType | None) -> str:
- """
- Get the string form of a dtype
-
- Args:
- dtype (:class:`.DTypeType`): Dtype to stringify
-
- Returns:
- str
- """
- if isinstance(dtype, ReferenceDtype):
- return dtype.target_type
- elif dtype is None or dtype == []:
- # Some ill-defined datasets are "abstract" despite that not being in the schema language
- return "AnyType"
- elif isinstance(dtype, FlatDtype):
- return dtype.value
- elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype):
- # there is precisely one class that uses compound dtypes:
- # TimeSeriesReferenceVectorData
- # compoundDtypes are able to define a ragged table according to the schema
- # but are used in this single case equivalently to attributes.
- # so we'll... uh... treat them as slots.
- # TODO
- return "AnyType"
-
- else:
- # flat dtype
- return dtype
-
def build_name_slot(self) -> SlotDefinition:
"""
If a class has a name, then that name should be a slot with a
diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py
index 8bc34b6..ef5eb61 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py
@@ -7,13 +7,13 @@ from typing import ClassVar, Optional, Type
from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition
-from nwb_linkml.adapters.adapter import BuildResult
+from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
-from nwb_linkml.maps.dtype import flat_to_linkml
+from nwb_linkml.maps.dtype import flat_to_linkml, handle_dtype
from nwb_linkml.maps.naming import camel_to_snake
-from nwb_schema_language import CompoundDtype, Dataset
+from nwb_schema_language import Dataset
class DatasetMap(Map):
@@ -106,7 +106,7 @@ class MapScalar(DatasetMap):
this_slot = SlotDefinition(
name=cls.name,
description=cls.doc,
- range=ClassAdapter.handle_dtype(cls.dtype),
+ range=handle_dtype(cls.dtype),
**QUANTITY_MAP[cls.quantity],
)
res = BuildResult(slots=[this_slot])
@@ -154,10 +154,14 @@ class MapScalarAttributes(DatasetMap):
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -203,9 +207,7 @@ class MapScalarAttributes(DatasetMap):
"""
Map to a scalar attribute with an adjoining "value" slot
"""
- value_slot = SlotDefinition(
- name="value", range=ClassAdapter.handle_dtype(cls.dtype), required=True
- )
+ value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True)
res.classes[0].attributes["value"] = value_slot
return res
@@ -216,8 +218,8 @@ class MapListlike(DatasetMap):
Used exactly once in the core schema, in ``ImageReferences`` -
an array of references to other ``Image`` datasets. We ignore the
- usual array structure and unnest the implicit array into a slot names from the
- target type rather than the oddly-named ``num_images`` dimension so that
+ usual array structure and unnest the implicit array into a slot named "value"
+ rather than the oddly-named ``num_images`` dimension so that
ultimately in the pydantic model we get a nicely behaved single-level list.
Examples:
@@ -245,12 +247,16 @@ class MapListlike(DatasetMap):
name: name
range: string
required: true
- image:
- name: image
+ value:
+ name: value
+ annotations:
+ source_type:
+ tag: source_type
+ value: reference
description: Ordered dataset of references to Image objects.
- multivalued: true
range: Image
required: true
+ multivalued: true
tree_root: true
"""
@@ -271,7 +277,7 @@ class MapListlike(DatasetMap):
* - ``dtype``
- ``Class``
"""
- dtype = ClassAdapter.handle_dtype(cls.dtype)
+ dtype = handle_dtype(cls.dtype)
return (
cls.neurodata_type_inc != "VectorData"
and is_1d(cls)
@@ -286,15 +292,15 @@ class MapListlike(DatasetMap):
"""
Map to a list of the given class
"""
- dtype = camel_to_snake(ClassAdapter.handle_dtype(cls.dtype))
slot = SlotDefinition(
- name=dtype,
+ name="value",
multivalued=True,
- range=ClassAdapter.handle_dtype(cls.dtype),
+ range=handle_dtype(cls.dtype),
description=cls.doc,
required=cls.quantity not in ("*", "?"),
+ annotations=[{"source_type": "reference"}],
)
- res.classes[0].attributes[dtype] = slot
+ res.classes[0].attributes["value"] = slot
return res
@@ -378,7 +384,7 @@ class MapArraylike(DatasetMap):
- ``False``
"""
- dtype = ClassAdapter.handle_dtype(cls.dtype)
+ dtype = handle_dtype(cls.dtype)
return (
cls.name
and (all([cls.dims, cls.shape]) or cls.neurodata_type_inc == "VectorData")
@@ -409,7 +415,7 @@ class MapArraylike(DatasetMap):
SlotDefinition(
name=name,
multivalued=False,
- range=ClassAdapter.handle_dtype(cls.dtype),
+ range=handle_dtype(cls.dtype),
description=cls.doc,
required=cls.quantity not in ("*", "?"),
**expressions,
@@ -478,12 +484,14 @@ class MapArrayLikeAttributes(DatasetMap):
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -513,7 +521,7 @@ class MapArrayLikeAttributes(DatasetMap):
"""
Check that we're an array with some additional metadata
"""
- dtype = ClassAdapter.handle_dtype(cls.dtype)
+ dtype = handle_dtype(cls.dtype)
return (
all([cls.dims, cls.shape])
and cls.neurodata_type_inc != "VectorData"
@@ -532,10 +540,8 @@ class MapArrayLikeAttributes(DatasetMap):
array_adapter = ArrayAdapter(cls.dims, cls.shape)
expressions = array_adapter.make_slot()
# make a slot for the arraylike class
- array_slot = SlotDefinition(
- name="array", range=ClassAdapter.handle_dtype(cls.dtype), **expressions
- )
- res.classes[0].attributes.update({"array": array_slot})
+ array_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), **expressions)
+ res.classes[0].attributes.update({"value": array_slot})
return res
@@ -572,7 +578,7 @@ class MapClassRange(DatasetMap):
name=cls.name,
description=cls.doc,
range=f"{cls.neurodata_type_inc}",
- annotations=[{"named": True}],
+ annotations=[{"named": True}, {"source_type": "neurodata_type_inc"}],
**QUANTITY_MAP[cls.quantity],
)
res = BuildResult(slots=[this_slot])
@@ -596,7 +602,7 @@ class MapVectorClassRange(DatasetMap):
Check that we are a VectorData object without any additional attributes
with a dtype that refers to another class
"""
- dtype = ClassAdapter.handle_dtype(cls.dtype)
+ dtype = handle_dtype(cls.dtype)
return (
cls.neurodata_type_inc == "VectorData"
and cls.name
@@ -617,7 +623,7 @@ class MapVectorClassRange(DatasetMap):
name=cls.name,
description=cls.doc,
multivalued=True,
- range=ClassAdapter.handle_dtype(cls.dtype),
+ range=handle_dtype(cls.dtype),
required=cls.quantity not in ("*", "?"),
)
res = BuildResult(slots=[this_slot])
@@ -672,7 +678,7 @@ class MapVectorClassRange(DatasetMap):
# this_slot = SlotDefinition(
# name=cls.name,
# description=cls.doc,
-# range=ClassAdapter.handle_dtype(cls.dtype),
+# range=handle_dtype(cls.dtype),
# multivalued=True,
# )
# # No need to make a class for us, so we replace the existing build results
@@ -686,17 +692,28 @@ class MapNVectors(DatasetMap):
Most commonly: ``VectorData`` is subclassed without a name and with a '*' quantity to indicate
arbitrary columns.
+
+ Used twice:
+ - Images
+ - DynamicTable (and all its uses)
+
+ DynamicTable (and the VectorData slot this would be called for)
+ is handled specially and simply dropped, because arbitrary extra VectorData
+ columns are handled by the mixin classes in :mod:`nwb_linkml.includes.hdmf`.
+
+ So really this is just a handler for the ``Images`` case
"""
@classmethod
def check(c, cls: Dataset) -> bool:
"""
- Check for being an unnamed multivalued vector class
+ Check for being an unnamed multivalued vector class that isn't VectorData
"""
return (
cls.name is None
and cls.neurodata_type_def is None
and cls.neurodata_type_inc
+ and cls.neurodata_type_inc != "VectorData"
and cls.quantity in ("*", "+")
)
@@ -725,6 +742,10 @@ class MapCompoundDtype(DatasetMap):
We render them just as a class with each of the dtypes as slots - they are
typically used by other datasets to create a table.
+ Since exactly one class (``TimeSeriesReferenceVectorData``) uses compound dtypes
+ meaningfully, we just hardcode inheriting the array shape from the VectorData
+ parent classes rather than generalizing; otherwise, linkml schemas correctly
+ propagate the ``value`` property.
+
Eg. ``base.TimeSeriesReferenceVectorData``
.. code-block:: yaml
@@ -772,10 +793,14 @@ class MapCompoundDtype(DatasetMap):
slots[a_dtype.name] = SlotDefinition(
name=a_dtype.name,
description=a_dtype.doc,
- range=ClassAdapter.handle_dtype(a_dtype.dtype),
+ range=handle_dtype(a_dtype.dtype),
+ array=ArrayExpression(exact_number_dimensions=1),
**QUANTITY_MAP[cls.quantity],
)
res.classes[0].attributes.update(slots)
+
+ if "value" in res.classes[0].attributes:
+ del res.classes[0].attributes["value"]
return res
@@ -825,36 +850,3 @@ class DatasetAdapter(ClassAdapter):
return None
else:
return matches[0]
-
-
-def is_1d(cls: Dataset) -> bool:
- """
- Check if the values of a dataset are 1-dimensional.
-
- Specifically:
- * a single-layer dim/shape list of length 1, or
- * a nested dim/shape list where every nested spec is of length 1
- """
- return (
- not any([isinstance(dim, list) for dim in cls.dims]) and len(cls.dims) == 1
- ) or ( # nested list
- all([isinstance(dim, list) for dim in cls.dims])
- and len(cls.dims) == 1
- and len(cls.dims[0]) == 1
- )
-
-
-def is_compound(cls: Dataset) -> bool:
- """Check if dataset has a compound dtype"""
- return (
- isinstance(cls.dtype, list)
- and len(cls.dtype) > 0
- and isinstance(cls.dtype[0], CompoundDtype)
- )
-
-
-def has_attrs(cls: Dataset) -> bool:
- """
- Check if a dataset has any attributes at all without defaults
- """
- return len(cls.attributes) > 0 and all([not a.value for a in cls.attributes])
diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py
index 3b75487..13a03b7 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/group.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/group.py
@@ -2,7 +2,7 @@
Adapter for NWB groups to linkml Classes
"""
-from typing import Type
+from typing import List, Type
from linkml_runtime.linkml_model import SlotDefinition
@@ -28,25 +28,13 @@ class GroupAdapter(ClassAdapter):
Do the translation, yielding the BuildResult
"""
# Handle container groups with only * quantity unnamed groups
- if len(self.cls.groups) > 0 and all(
- [self._check_if_container(g) for g in self.cls.groups]
+ if (
+ len(self.cls.groups) > 0
+ and not self.cls.links
+ and all([self._check_if_container(g) for g in self.cls.groups])
): # and \
# self.parent is not None:
return self.handle_container_group(self.cls)
- # Or you can have groups like /intervals where there are some named groups, and some unnamed
- # but they all have the same type
- elif (
- len(self.cls.groups) > 0
- and all(
- [
- g.neurodata_type_inc == self.cls.groups[0].neurodata_type_inc
- for g in self.cls.groups
- ]
- )
- and self.cls.groups[0].neurodata_type_inc is not None
- and all([g.quantity in ("?", "*") for g in self.cls.groups])
- ):
- return self.handle_container_group(self.cls)
# handle if we are a terminal container group without making a new class
if (
@@ -58,17 +46,42 @@ class GroupAdapter(ClassAdapter):
return self.handle_container_slot(self.cls)
nested_res = self.build_subclasses()
+ # add links
+ links = self.build_links()
+
# we don't propagate slots up to the next level since they are meant for this
# level (ie. a way to refer to our children)
- res = self.build_base(extra_attrs=nested_res.slots)
+ res = self.build_base(extra_attrs=nested_res.slots + links)
# we do propagate classes tho
res.classes.extend(nested_res.classes)
return res
+ def build_links(self) -> List[SlotDefinition]:
+ """
+ Build links specified in the ``links`` field as slots that refer to other
+ classes, with an additional annotation specifying that they are in fact links.
+
+ Link slots can take either the object itself or the path to that object in the
+ file hierarchy as a string.
+ """
+ if not self.cls.links:
+ return []
+
+ slots = [
+ SlotDefinition(
+ name=link.name,
+ any_of=[{"range": link.target_type}, {"range": "string"}],
+ annotations=[{"tag": "source_type", "value": "link"}],
+ **QUANTITY_MAP[link.quantity],
+ )
+ for link in self.cls.links
+ ]
+ return slots
+
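+ # Sketch of the translation (hypothetical link spec):
+ #
+ #   links: [{name: device, target_type: Device, quantity: "?"}]
+ #
+ # becomes roughly:
+ #
+ #   SlotDefinition(name="device",
+ #                  any_of=[{"range": "Device"}, {"range": "string"}],
+ #                  annotations=[{"tag": "source_type", "value": "link"}])
+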
def handle_container_group(self, cls: Group) -> BuildResult:
"""
- Make a special LinkML `children` slot that can
+ Make a special LinkML `value` slot that can
have any number of the objects that are of `neurodata_type_inc` class
Examples:
@@ -84,14 +97,11 @@ class GroupAdapter(ClassAdapter):
doc: Images objects containing images of presented stimuli.
quantity: '*'
- Args:
- children (List[:class:`.Group`]): Child groups
-
"""
# don't build subgroups as their own classes, just make a slot
# that can contain them
- name = cls.name if self.cls.name else "children"
+ name = cls.name if cls.name else "value"
slot = SlotDefinition(
name=name,
diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
index ca354a5..266906e 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py
@@ -13,7 +13,7 @@ from typing import Dict, List, Optional
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import Annotation, SchemaDefinition
-from pydantic import Field, PrivateAttr
+from pydantic import Field, model_validator
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.schema import SchemaAdapter
@@ -31,12 +31,6 @@ class NamespacesAdapter(Adapter):
schemas: List[SchemaAdapter]
imported: List["NamespacesAdapter"] = Field(default_factory=list)
- _imports_populated: bool = PrivateAttr(False)
-
- def __init__(self, **kwargs: dict):
- super().__init__(**kwargs)
- self._populate_schema_namespaces()
-
@classmethod
def from_yaml(cls, path: Path) -> "NamespacesAdapter":
"""
@@ -70,8 +64,6 @@ class NamespacesAdapter(Adapter):
"""
Build the NWB namespace to the LinkML Schema
"""
- if not self._imports_populated and not skip_imports:
- self.populate_imports()
sch_result = BuildResult()
for sch in self.schemas:
@@ -129,6 +121,7 @@ class NamespacesAdapter(Adapter):
return sch_result
+ @model_validator(mode="after")
def _populate_schema_namespaces(self) -> None:
"""
annotate for each schema which namespace imports it
@@ -143,6 +136,7 @@ class NamespacesAdapter(Adapter):
sch.namespace = ns.name
sch.version = ns.version
break
+ return self
def find_type_source(self, name: str) -> SchemaAdapter:
"""
@@ -182,7 +176,8 @@ class NamespacesAdapter(Adapter):
else:
raise KeyError(f"No schema found that define {name}")
- def populate_imports(self) -> None:
+ @model_validator(mode="after")
+ def populate_imports(self) -> "NamespacesAdapter":
"""
Populate the imports that are needed for each schema file
@@ -199,11 +194,7 @@ class NamespacesAdapter(Adapter):
if depends_on not in sch.imports:
sch.imports.append(depends_on)
- # do so recursively
- for imported in self.imported:
- imported.populate_imports()
-
- self._imports_populated = True
+ return self
def to_yaml(self, base_dir: Path) -> None:
"""
@@ -266,10 +257,7 @@ class NamespacesAdapter(Adapter):
else:
ns = ns[0]
- schema_names = []
- for sch in ns.schema_:
- if sch.source is not None:
- schema_names.append(sch.source)
+ schema_names = [sch.source for sch in ns.schema_ if sch.source is not None]
return schema_names
def schema_namespace(self, name: str) -> Optional[str]:
diff --git a/nwb_linkml/src/nwb_linkml/adapters/schema.py b/nwb_linkml/src/nwb_linkml/adapters/schema.py
index 4f03944..e6316b7 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/schema.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/schema.py
@@ -42,7 +42,8 @@ class SchemaAdapter(Adapter):
"""
The namespace.schema name for a single schema
"""
- return ".".join([self.namespace, self.path.with_suffix("").name])
+ namespace = self.namespace if self.namespace is not None else ""
+ return ".".join([namespace, self.path.with_suffix("").name])
def __repr__(self):
out_str = "\n" + self.name + "\n"
diff --git a/nwb_linkml/src/nwb_linkml/config.py b/nwb_linkml/src/nwb_linkml/config.py
index 8fa84f7..6bea3d5 100644
--- a/nwb_linkml/src/nwb_linkml/config.py
+++ b/nwb_linkml/src/nwb_linkml/config.py
@@ -4,8 +4,10 @@ Manage the operation of nwb_linkml from environmental variables
import tempfile
from pathlib import Path
+from typing import Literal, Optional
from pydantic import (
+ BaseModel,
DirectoryPath,
Field,
FieldValidationInfo,
@@ -15,15 +17,68 @@ from pydantic import (
)
from pydantic_settings import BaseSettings, SettingsConfigDict
+LOG_LEVELS = Literal["DEBUG", "INFO", "WARNING", "ERROR"]
+
+
+class LogConfig(BaseModel):
+ """
+ Configuration for logging
+ """
+
+ level: LOG_LEVELS = "INFO"
+ """
+ Severity of log messages to process.
+ """
+ level_file: Optional[LOG_LEVELS] = None
+ """
+ Severity for file-based logging. If unset, use ``level``
+ """
+ level_stdout: Optional[LOG_LEVELS] = "WARNING"
+ """
+ Severity for stream-based logging. If unset, use ``level``
+ """
+ file_n: int = 5
+ """
+ Number of log files to rotate through
+ """
+ file_size: int = 2**22 # roughly 4MB
+ """
+ Maximum size of log files (bytes)
+ """
+
+ @field_validator("level", "level_file", "level_stdout", mode="before")
+ @classmethod
+ def uppercase_levels(cls, value: Optional[str] = None) -> Optional[str]:
+ """
+ Ensure log level strings are uppercased
+ """
+ if value is not None:
+ value = value.upper()
+ return value
+
+ @model_validator(mode="after")
+ def inherit_base_level(self) -> "LogConfig":
+ """
+ If loglevels for specific output streams are unset, set from base :attr:`.level`
+ """
+ levels = ("level_file", "level_stdout")
+ for level_name in levels:
+ if getattr(self, level_name) is None:
+ setattr(self, level_name, self.level)
+ return self
+
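+# Illustrative behavior: ``LogConfig(level="DEBUG")`` yields
+# ``level_file="DEBUG"`` (its default ``None`` inherits from ``level``) but
+# keeps ``level_stdout="WARNING"``, since inheritance only applies to levels
+# that are explicitly ``None``.
+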
class Config(BaseSettings):
"""
Configuration for nwb_linkml, populated by default but can be overridden
by environment variables.
+ Nested models can be assigned from .env files with a __ (see examples)
+
Examples:
export NWB_LINKML_CACHE_DIR="/home/mycache/dir"
+ export NWB_LINKML_LOGS__LEVEL="debug"
"""
@@ -32,6 +87,11 @@ class Config(BaseSettings):
default_factory=lambda: Path(tempfile.gettempdir()) / "nwb_linkml__cache",
description="Location to cache generated schema and models",
)
+ log_dir: Path = Field(
+ Path("logs"),
+ description="Location to store logs. If a relative directory, relative to ``cache_dir``",
+ )
+ logs: LogConfig = Field(LogConfig(), description="Log configuration")
@computed_field
@property
@@ -62,6 +122,15 @@ class Config(BaseSettings):
assert v.exists()
return v
+ @model_validator(mode="after")
+ def log_dir_relative_to_cache_dir(self) -> "Config":
+ """
+ If log dir is relative, put it beneath the cache_dir
+ """
+ if not self.log_dir.is_absolute():
+ self.log_dir = self.cache_dir / self.log_dir
+ return self
+
@model_validator(mode="after")
def folders_exist(self) -> "Config":
"""
diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
index b42c83a..0cdfd23 100644
--- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py
+++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
@@ -1,74 +1,48 @@
"""
Subclass of :class:`linkml.generators.PydanticGenerator`
+customized to support NWB models.
-The pydantic generator is a subclass of
-- :class:`linkml.utils.generator.Generator`
-- :class:`linkml.generators.oocodegen.OOCodeGenerator`
-
-The default `__main__` method
-- Instantiates the class
-- Calls :meth:`~linkml.generators.PydanticGenerator.serialize`
-
-The `serialize` method:
-
-- Accepts an optional jinja-style template, otherwise it uses the default template
-- Uses :class:`linkml_runtime.utils.schemaview.SchemaView` to interact with the schema
-- Generates linkML Classes
- - `generate_enums` runs first
-
-.. note::
-
- This module is heinous. We have mostly copied and pasted the existing :class:`linkml.generators.PydanticGenerator`
- and overridden what we need to make this work for NWB, but the source is...
- a little messy. We will be tidying this up and trying to pull changes upstream,
- but for now this is just our hacky little secret.
-
+See class and module docstrings for details :)
"""
-# FIXME: Remove this after we refactor this generator
-# ruff: noqa
-
-import inspect
-import pdb
import re
import sys
-import warnings
-from copy import copy
from dataclasses import dataclass, field
from pathlib import Path
from types import ModuleType
-from typing import ClassVar, Dict, List, Optional, Tuple, Type, Union
+from typing import ClassVar, Dict, List, Optional, Tuple
from linkml.generators import PydanticGenerator
-from linkml.generators.pydanticgen.build import SlotResult
from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray
-from linkml.generators.pydanticgen.template import PydanticModule, Import, Imports
+from linkml.generators.pydanticgen.build import ClassResult, SlotResult
+from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule
from linkml_runtime.linkml_model.meta import (
- Annotation,
- AnonymousSlotExpression,
ArrayExpression,
- ClassDefinition,
- ClassDefinitionName,
- ElementName,
SchemaDefinition,
SlotDefinition,
SlotDefinitionName,
)
from linkml_runtime.utils.compile_python import file_text
-from linkml_runtime.utils.formatutils import camelcase, underscore, remove_empty_items
+from linkml_runtime.utils.formatutils import remove_empty_items
from linkml_runtime.utils.schemaview import SchemaView
-from pydantic import BaseModel
-
-from nwb_linkml.maps import flat_to_nptyping
-from nwb_linkml.maps.naming import module_case, version_module_case
-from nwb_linkml.includes.types import ModelTypeString, _get_name, NamedString, NamedImports
+from nwb_linkml.includes.base import BASEMODEL_GETITEM
+from nwb_linkml.includes.hdmf import (
+ DYNAMIC_TABLE_IMPORTS,
+ DYNAMIC_TABLE_INJECTS,
+ TSRVD_IMPORTS,
+ TSRVD_INJECTS,
+)
+from nwb_linkml.includes.types import ModelTypeString, NamedImports, NamedString, _get_name
OPTIONAL_PATTERN = re.compile(r"Optional\[([\w\.]*)\]")
@dataclass
class NWBPydanticGenerator(PydanticGenerator):
+ """
+ Subclass of pydantic generator; custom behavior is in overridden lifecycle methods :)
+ """
injected_fields: List[str] = (
(
@@ -76,6 +50,7 @@ class NWBPydanticGenerator(PydanticGenerator):
' is stored in an NWB file")'
),
'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
+ BASEMODEL_GETITEM,
)
split: bool = True
imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")])
@@ -95,7 +70,10 @@ class NWBPydanticGenerator(PydanticGenerator):
def _check_anyof(
self, s: SlotDefinition, sn: SlotDefinitionName, sv: SchemaView
- ): # pragma: no cover
+ ) -> None: # pragma: no cover
+ """
+ Overridden to allow `array` in any_of
+ """
# Confirm that the original slot range (ignoring the default that comes in from
# induced_slot) isn't in addition to setting any_of
allowed_keys = ("array",)
@@ -104,7 +82,7 @@ class NWBPydanticGenerator(PydanticGenerator):
allowed = True
for option in s.any_of:
items = remove_empty_items(option)
- if not all([key in allowed_keys for key in items.keys()]):
+ if not all([key in allowed_keys for key in items]):
allowed = False
if allowed:
return
@@ -116,6 +94,14 @@ class NWBPydanticGenerator(PydanticGenerator):
if not base_range_subsumes_any_of:
raise ValueError("Slot cannot have both range and any_of defined")
+ def before_generate_slot(self, slot: SlotDefinition, sv: SchemaView) -> SlotDefinition:
+ """
+ Force some properties to be optional
+ """
+ if slot.name == "target" and "index" in slot.description:
+ slot.required = False
+ return slot
+
def after_generate_slot(self, slot: SlotResult, sv: SchemaView) -> SlotResult:
"""
- strip unwanted metadata
@@ -127,7 +113,16 @@ class NWBPydanticGenerator(PydanticGenerator):
return slot
+ def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult:
+ """Customize dynamictable behavior"""
+ cls = AfterGenerateClass.inject_dynamictable(cls)
+ cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv)
+ return cls
+
def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule:
+ """
+ Remove source file from metadata
+ """
if "source_file" in template.meta:
del template.meta["source_file"]
return template
@@ -159,6 +154,9 @@ class AfterGenerateSlot:
@staticmethod
def skip_meta(slot: SlotResult, skip_meta: tuple[str]) -> SlotResult:
+ """
+ Skip additional metadata slots
+ """
for key in skip_meta:
if key in slot.attribute.meta:
del slot.attribute.meta[key]
@@ -227,13 +225,91 @@ class AfterGenerateSlot:
return slot
+class AfterGenerateClass:
+ """
+ Container class for class-modification methods
+ """
+
+ @staticmethod
+ def inject_dynamictable(cls: ClassResult) -> ClassResult:
+ """
+ Modify dynamictable class bases and inject needed objects :)
+
+ Args:
+ cls: class result to modify
+
+ Returns:
+ the modified class result
+ """
+ if cls.cls.name == "DynamicTable":
+ cls.cls.bases = ["DynamicTableMixin"]
+
+ if cls.injected_classes is None:
+ cls.injected_classes = DYNAMIC_TABLE_INJECTS.copy()
+ else:
+ cls.injected_classes.extend(DYNAMIC_TABLE_INJECTS.copy())
+
+ if isinstance(cls.imports, Imports):
+ cls.imports += DYNAMIC_TABLE_IMPORTS
+ elif isinstance(cls.imports, list):
+ cls.imports = Imports(imports=cls.imports) + DYNAMIC_TABLE_IMPORTS
+ else:
+ cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy()
+ elif cls.cls.name == "VectorData":
+ cls.cls.bases = ["VectorDataMixin"]
+ elif cls.cls.name == "VectorIndex":
+ cls.cls.bases = ["VectorIndexMixin"]
+ elif cls.cls.name == "DynamicTableRegion":
+ cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
+ elif cls.cls.name == "AlignedDynamicTable":
+ cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]
+ elif cls.cls.name == "TimeSeriesReferenceVectorData":
+ # in core.nwb.base, so need to inject and import again
+ cls.cls.bases = ["TimeSeriesReferenceVectorDataMixin", "VectorData"]
+ if cls.injected_classes is None:
+ cls.injected_classes = TSRVD_INJECTS.copy()
+ else:
+ cls.injected_classes.extend(TSRVD_INJECTS.copy())
+
+ if isinstance(cls.imports, Imports):
+ cls.imports += TSRVD_IMPORTS
+ elif isinstance(cls.imports, list):
+ cls.imports = Imports(imports=cls.imports) + TSRVD_IMPORTS
+ else:
+ cls.imports = TSRVD_IMPORTS.model_copy()
+
+ return cls
+
+ @staticmethod
+ def wrap_dynamictable_columns(cls: ClassResult, sv: SchemaView) -> ClassResult:
+ """
+ Wrap NDArray columns inside of dynamictables with ``VectorData`` or
+ ``VectorIndex``, which are generic classes whose value slot is
+ parameterized by the NDArray
+ """
+ if cls.source.is_a == "DynamicTable" or "DynamicTable" in sv.class_ancestors(
+ cls.source.name
+ ):
+ for an_attr in cls.cls.attributes:
+ if "NDArray" in (slot_range := cls.cls.attributes[an_attr].range):
+ if an_attr.endswith("_index"):
+ cls.cls.attributes[an_attr].range = "".join(
+ ["VectorIndex[", slot_range, "]"]
+ )
+ else:
+ cls.cls.attributes[an_attr].range = "".join(
+ ["VectorData[", slot_range, "]"]
+ )
+ return cls
+
+
def compile_python(
text_or_fn: str, package_path: Path = None, module_name: str = "test"
) -> ModuleType:
"""
Compile the text or file and return the resulting module
@param text_or_fn: Python text or file name that references python file
- @param package_path: Root package path. If omitted and we've got a python file, the package is the containing
+ @param package_path: Root package path. If omitted and we've got a python file,
+ the package is the containing
directory
@return: Compiled module
"""
diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py
new file mode 100644
index 0000000..ed69bf3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/includes/base.py
@@ -0,0 +1,14 @@
+"""
+Modifications to the ConfiguredBaseModel used by all generated classes
+"""
+
+BASEMODEL_GETITEM = """
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ \"\"\"Try and get a value from value or "data" if we have it\"\"\"
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+"""
diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
index 32647e7..19bbf34 100644
--- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py
+++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
@@ -2,38 +2,913 @@
Special types for mimicking HDMF special case behavior
"""
-from typing import Any
+import sys
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ ClassVar,
+ Dict,
+ Generic,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+ overload,
+)
-from pydantic import BaseModel, ConfigDict
+import numpy as np
+import pandas as pd
+from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport
+from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ ValidationError,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ field_validator,
+ model_validator,
+)
+
+if TYPE_CHECKING: # pragma: no cover
+ from nwb_linkml.models import VectorData, VectorIndex
+
+T = TypeVar("T", bound=NDArray)
+T_INJECT = 'T = TypeVar("T", bound=NDArray)'
class DynamicTableMixin(BaseModel):
"""
Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
"""
- model_config = ConfigDict(extra="allow")
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+ NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
- # @model_validator(mode='after')
- # def ensure_equal_length(cls, model: 'DynamicTableMixin') -> 'DynamicTableMixin':
- # """
- # Ensure all vectors are of equal length
- # """
- # raise NotImplementedError('TODO')
- #
- # @model_validator(mode="after")
- # def create_index_backrefs(cls, model: 'DynamicTableMixin') -> 'DynamicTableMixin':
- # """
- # Ensure that vectordata with vectorindexes know about them
- # """
- # raise NotImplementedError('TODO')
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
- def __getitem__(self, item: str) -> Any:
- raise NotImplementedError("TODO")
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+ return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+ - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+ - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
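+ # Indexing sketch (hypothetical table with columns "a" and "b"):
+ #
+ #   table["a"]       # the "a" column object
+ #   table[0]         # single-row pandas DataFrame
+ #   table[0, "a"]    # scalar cell value
+ #   table[0:2, 0:2]  # 2x2 pandas DataFrame
+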
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
def __setitem__(self, key: str, value: Any) -> None:
- raise NotImplementedError("TODO")
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
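+ # Illustrative column addition (hypothetical data): after construction,
+ # ``table.spikes = [1, 2, 3]`` appends "spikes" to ``colnames`` and stores
+ # the value as an extra field.
+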
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+ the model dict preserves insertion order (python >= 3.7), so we use it,
+ minus anything in :attr:`.NON_COLUMN_FIELDS`, to infer column order from argument order
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValueError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
-# class VectorDataMixin(BaseModel):
-# index: Optional[BaseModel] = None
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
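+ # Ragged indexing sketch (hypothetical values): with
+ # ``target.value == [1, 2, 3, 4, 5]`` and ``index.value == [2, 5]``,
+ # ``index[0] == [1, 2]`` and ``index[1] == [3, 4, 5]``.
+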
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct HDMF's indexing logic so that the _data_ is
+ what the API provides when one accesses table.data
+ (rather than table.data_index, as hdmf does),
+ we still set values on the target here (rather than on the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " ragged values of of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+ A great deal of code duplication because we need to avoid diamond inheritance
+ and also it's not so easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+ NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+ return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+ Args:
+ item: row index/slice, category name, or (row, category) tuple
+
+ Returns:
+ :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
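+ # Indexing sketch (hypothetical categories): ``aligned["trials"]`` returns
+ # the whole "trials" category as a DataFrame, while ``aligned[0]`` returns a
+ # one-row DataFrame whose columns are a (category, column) MultiIndex.
+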
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+ the model dict preserves insertion order (python >= 3.7), so we use it,
+ minus anything in :attr:`.NON_CATEGORY_FIELDS`, to infer category order from argument order
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+ if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that all category tables are of equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+ """
+ Mixin class for TimeSeriesReferenceVectorData -
+ very simple, just indexing the given timeseries object.
+
+ These shouldn't have additional fields in them, just the three columns
+ for index, span, and timeseries
+ """
+
+ idx_start: NDArray[Shape["*"], int]
+ count: NDArray[Shape["*"], int]
+ timeseries: NDArray
+
+ @model_validator(mode="after")
+ def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+ """
+ Each of the three indexing columns must be the same length to work!
+ """
+ assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+ f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+ f" timeseries: {len(self.timeseries)}"
+ )
+ return self
+
+ def __len__(self) -> int:
+ """Since we have ensured equal length, just return idx_start"""
+ return len(self.idx_start)
+
+ @overload
+ def _slice_helper(self, item: int) -> slice: ...
+
+ @overload
+ def _slice_helper(self, item: slice) -> List[slice]: ...
+
+ def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+ if isinstance(item, (int, np.integer)):
+ return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+ else:
+ starts = self.idx_start[item]
+ ends = starts + self.count[item]
+ return [slice(start, end) for start, end in zip(starts, ends)]
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+
+ if isinstance(item, (int, np.integer)):
+ return self.timeseries[item][self._slice_helper(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.idx_start)))
+ return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item]
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, must be an int, slice, or iterable"
+ )
+
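+ # Selection sketch (hypothetical values): with ``idx_start == [0, 2]`` and
+ # ``count == [2, 3]``, ``tsrvd[0]`` is ``timeseries[0][0:2]`` and
+ # ``tsrvd[1]`` is ``timeseries[1][2:5]``.
+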
+ def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+ if isinstance(key, (int, np.integer)):
+ self.timeseries[key][self._slice_helper(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.idx_start)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " target Timeseries object if you need more control"
+ )
+ for subitem, subvalue in zip(key, value):
+ self.timeseries[subitem][self._slice_helper(subitem)] = subvalue
+ else:
+ for subitem in key:
+ self.timeseries[subitem][self._slice_helper(subitem)] = value
+ else:
+ raise ValueError(
+ f"Dont know how to index with {key}, must be an int, slice, or iterable"
+ )
+
+
+DYNAMIC_TABLE_IMPORTS = Imports(
+ imports=[
+ Import(module="pandas", alias="pd"),
+ Import(
+ module="typing",
+ objects=[
+ ObjectImport(name="ClassVar"),
+ ObjectImport(name="Generic"),
+ ObjectImport(name="Iterable"),
+ ObjectImport(name="Tuple"),
+ ObjectImport(name="TypeVar"),
+ ObjectImport(name="overload"),
+ ],
+ ),
+ Import(
+ module="numpydantic", objects=[ObjectImport(name="NDArray"), ObjectImport(name="Shape")]
+ ),
+ Import(
+ module="pydantic",
+ objects=[
+ ObjectImport(name="model_validator"),
+ ObjectImport(name="field_validator"),
+ ObjectImport(name="ValidationInfo"),
+ ObjectImport(name="ValidatorFunctionWrapHandler"),
+ ObjectImport(name="ValidationError"),
+ ],
+ ),
+ Import(module="numpy", alias="np"),
+ ]
+)
+"""
+Imports required for the dynamic table mixin
+
+VectorData is purposefully excluded as an import or an inject so that it will be
+resolved to the VectorData definition in the generated module
+"""
+DYNAMIC_TABLE_INJECTS = [
+ T_INJECT,
+ VectorDataMixin,
+ VectorIndexMixin,
+ DynamicTableRegionMixin,
+ DynamicTableMixin,
+ AlignedDynamicTableMixin,
+]
+
+TSRVD_IMPORTS = Imports(
+ imports=[
+ Import(
+ module="typing",
+ objects=[
+ ObjectImport(name="Generic"),
+ ObjectImport(name="Iterable"),
+ ObjectImport(name="Tuple"),
+ ObjectImport(name="TypeVar"),
+ ObjectImport(name="overload"),
+ ],
+ ),
+ Import(module="pydantic", objects=[ObjectImport(name="model_validator")]),
+ ]
+)
+"""Imports for TimeSeriesReferenceVectorData"""
+TSRVD_INJECTS = [T_INJECT, VectorDataMixin, TimeSeriesReferenceVectorDataMixin]
+
+if "pytest" in sys.modules:
+ # during testing define concrete subclasses...
+ class VectorData(VectorDataMixin):
+ """VectorData subclass for testing"""
+
+ pass
+
+ class VectorIndex(VectorIndexMixin):
+ """VectorIndex subclass for testing"""
+
+ pass
+
+ class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """DynamicTableRegion subclass for testing"""
+
+ pass
+
+ class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin):
+ """TimeSeriesReferenceVectorData subclass for testing"""
+
+ pass
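+
+    # a sketch of the intent: tests can construct e.g. ``VectorData(value=np.arange(3))``
+    # directly, without generating a pydantic module from the schema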
diff --git a/nwb_linkml/src/nwb_linkml/includes/types.py b/nwb_linkml/src/nwb_linkml/includes/types.py
index 049aa65..2604eb5 100644
--- a/nwb_linkml/src/nwb_linkml/includes/types.py
+++ b/nwb_linkml/src/nwb_linkml/includes/types.py
@@ -19,7 +19,7 @@ ModelTypeString = """ModelType = TypeVar("ModelType", bound=Type[BaseModel])"""
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py
index d902bd9..387e4a6 100644
--- a/nwb_linkml/src/nwb_linkml/io/hdf5.py
+++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py
@@ -242,10 +242,7 @@ def find_references(h5f: h5py.File, path: str) -> List[str]:
def _find_references(name: str, obj: h5py.Group | h5py.Dataset) -> None:
pbar.update()
- refs = []
- for attr in obj.attrs.values():
- if isinstance(attr, h5py.h5r.Reference):
- refs.append(attr)
+ refs = [attr for attr in obj.attrs.values() if isinstance(attr, h5py.h5r.Reference)]
if isinstance(obj, h5py.Dataset):
# dataset is all references
diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py
index 3e2a76e..42718f5 100644
--- a/nwb_linkml/src/nwb_linkml/io/schema.py
+++ b/nwb_linkml/src/nwb_linkml/io/schema.py
@@ -2,6 +2,7 @@
Loading/saving NWB Schema yaml files
"""
+import warnings
from pathlib import Path
from pprint import pprint
from typing import Optional
@@ -70,6 +71,7 @@ def load_namespace_adapter(
namespace: Path | NamespaceRepo | Namespaces,
path: Optional[Path] = None,
version: Optional[str] = None,
+ imported: Optional[list[NamespacesAdapter]] = None,
) -> NamespacesAdapter:
"""
Load all schema referenced by a namespace file
@@ -81,6 +83,8 @@ def load_namespace_adapter(
version (str): Optional: tag or commit to check out namespace is a
:class:`.NamespaceRepo`. If ``None``, use ``HEAD`` if not already checked out,
or otherwise use whatever version is already checked out.
+ imported (list[:class:`.NamespacesAdapter`]): Optional: override discovered imports
+        with already-loaded namespace adapters
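+
+            A sketch of reusing an already-loaded adapter (bypassing import discovery)::
+
+                hdmf = load_namespace_adapter(HDMF_COMMON_REPO, version="1.8.0")
+                core = load_namespace_adapter(NWB_CORE_REPO, imported=[hdmf])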
Returns:
:class:`.NamespacesAdapter`
@@ -110,17 +114,56 @@ def load_namespace_adapter(
for ns in namespaces.namespaces:
for schema in ns.schema_:
if schema.source is None:
- # this is normal, we'll resolve later
- continue
- yml_file = (path / schema.source).resolve()
- sch.append(load_schema_file(yml_file))
+ if imported is None and schema.namespace == "hdmf-common" and ns.name == "core":
+ # special case - hdmf-common is imported by name without location or version,
+ # so to get the correct version we have to handle it separately
+ imported = _resolve_hdmf(namespace, path)
+ if imported is not None:
+ imported = [imported]
+ else:
+ continue
+ else:
+ yml_file = (path / schema.source).resolve()
+ sch.append(load_schema_file(yml_file))
- adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
+ if imported is not None:
+ adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch, imported=imported)
+ else:
+ adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
return adapter
-def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0") -> NamespacesAdapter:
+def _resolve_hdmf(
+ namespace: Path | NamespaceRepo | Namespaces, path: Optional[Path] = None
+) -> Optional[NamespacesAdapter]:
+ if path is None and isinstance(namespace, Namespaces):
+ # can't get any more information from already-loaded namespaces without a path
+ return None
+
+ if isinstance(namespace, NamespaceRepo):
+ # easiest route is if we got a NamespaceRepo
+ if namespace.name == "core":
+ hdmf_path = (path / namespace.imports["hdmf-common"]).resolve()
+ return load_namespace_adapter(namespace=hdmf_path)
+    # otherwise we are loading hdmf-common itself, which has nothing further to import
+ else:
+ return None
+ elif path is not None:
+ # otherwise try and get it from relative paths
+        # pretty much a hack, but we are compensating for the absence of a versioning system here
+ maybe_repo_root = path / NWB_CORE_REPO.imports["hdmf-common"]
+ if maybe_repo_root.exists():
+ return load_namespace_adapter(namespace=maybe_repo_root)
+ warnings.warn(
+ f"Could not locate hdmf-common from namespace {namespace} and path {path}", stacklevel=1
+ )
+ return None
+
+
+def load_nwb_core(
+ core_version: str = "2.7.0", hdmf_version: str = "1.8.0", hdmf_only: bool = False
+) -> NamespacesAdapter:
"""
Convenience function for loading the NWB core schema + hdmf-common as a namespace adapter.
@@ -136,14 +179,16 @@ def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0") -> N
Args:
core_version (str): an entry in :attr:`.NWB_CORE_REPO.versions`
hdmf_version (str): an entry in :attr:`.NWB_CORE_REPO.versions`
+        hdmf_only (bool): Only return the hdmf-common schema
Returns:
"""
# First get hdmf-common:
hdmf_schema = load_namespace_adapter(HDMF_COMMON_REPO, version=hdmf_version)
- schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version)
-
- schema.imported.append(hdmf_schema)
+ if hdmf_only:
+ schema = hdmf_schema
+ else:
+ schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version, imported=[hdmf_schema])
return schema
diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py
index 7bb68c4..c199062 100644
--- a/nwb_linkml/src/nwb_linkml/lang_elements.py
+++ b/nwb_linkml/src/nwb_linkml/lang_elements.py
@@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import (
TypeDefinition,
)
-from nwb_linkml.maps import flat_to_linkml, flat_to_np
+from nwb_linkml.maps import flat_to_linkml
def _make_dtypes() -> List[TypeDefinition]:
@@ -27,12 +27,15 @@ def _make_dtypes() -> List[TypeDefinition]:
if nwbtype.startswith("uint"):
amin = 0
- np_type = flat_to_np[nwbtype]
+ # FIXME: Restore numpy types when we wrap them :)
+ # np_type = flat_to_np[nwbtype]
- repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None
+ # repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None
atype = TypeDefinition(
- name=nwbtype, minimum_value=amin, typeof=linkmltype, repr=repr_string
+ name=nwbtype,
+ minimum_value=amin,
+ typeof=linkmltype, # repr=repr_string
)
DTypeTypes.append(atype)
return DTypeTypes
diff --git a/nwb_linkml/src/nwb_linkml/logging.py b/nwb_linkml/src/nwb_linkml/logging.py
new file mode 100644
index 0000000..35e4425
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/logging.py
@@ -0,0 +1,100 @@
+"""
+Logging factory and handlers
+"""
+
+import logging
+from logging.handlers import RotatingFileHandler
+from pathlib import Path
+from typing import Optional, Union
+
+from rich.logging import RichHandler
+
+from nwb_linkml.config import LOG_LEVELS, Config
+
+
+def init_logger(
+ name: str,
+ log_dir: Union[Optional[Path], bool] = None,
+ level: Optional[LOG_LEVELS] = None,
+ file_level: Optional[LOG_LEVELS] = None,
+ log_file_n: Optional[int] = None,
+ log_file_size: Optional[int] = None,
+) -> logging.Logger:
+ """
+ Make a logger.
+
+    Log to a set of rotating files in the ``log_dir`` according to ``name``,
+    as well as using the :class:`~rich.RichHandler` for pretty-formatted stdout logs.
+
+    Args:
+        name (str): Name of this logger. Ideally names are hierarchical
+            and indicate what they are logging for, e.g. ``nwb_linkml.adapters``,
+            and don't contain metadata like timestamps, etc. (which are in the logs)
+        log_dir (:class:`pathlib.Path`): Directory to store file-based logs in. If ``None``,
+            get from :class:`.Config`. If ``False``, disable file logging.
+        level (:class:`.LOG_LEVELS`): Level to use for stdout logging. If ``None``,
+            get from :class:`.Config`
+        file_level (:class:`.LOG_LEVELS`): Level to use for file-based logging.
+            If ``None``, get from :class:`.Config`
+        log_file_n (int): Number of rotating file logs to use.
+            If ``None``, get from :class:`.Config`
+        log_file_size (int): Maximum size of logfiles before rotation.
+            If ``None``, get from :class:`.Config`
+
+ Returns:
+ :class:`logging.Logger`
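+
+    A sketch of typical usage (unset options fall back to :class:`.Config`)::
+
+        logger = init_logger("nwb_linkml.adapters", level="DEBUG")
+        logger.debug("loading namespaces")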
+ """
+ config = Config()
+ if log_dir is None:
+ log_dir = config.log_dir
+ if level is None:
+ level = config.logs.level_stdout
+ if file_level is None:
+ file_level = config.logs.level_file
+ if log_file_n is None:
+ log_file_n = config.logs.file_n
+ if log_file_size is None:
+ log_file_size = config.logs.file_size
+
+ if not name.startswith("nwb_linkml"):
+ name = "nwb_linkml." + name
+
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+
+ # Add handlers for stdout and file
+ if log_dir is not False:
+ logger.addHandler(_file_handler(name, file_level, log_dir, log_file_n, log_file_size))
+
+ logger.addHandler(_rich_handler())
+
+ return logger
+
+
+def _file_handler(
+ name: str,
+ file_level: LOG_LEVELS,
+ log_dir: Path,
+ log_file_n: int = 5,
+ log_file_size: int = 2**22,
+) -> RotatingFileHandler:
+ # See init_logger for arg docs
+
+ filename = Path(log_dir) / ".".join([name, "log"])
+ file_handler = RotatingFileHandler(
+ str(filename), mode="a", maxBytes=log_file_size, backupCount=log_file_n
+ )
+ file_formatter = logging.Formatter("[%(asctime)s] %(levelname)s [%(name)s]: %(message)s")
+ file_handler.setLevel(file_level)
+ file_handler.setFormatter(file_formatter)
+ return file_handler
+
+
+def _rich_handler() -> RichHandler:
+ rich_handler = RichHandler(rich_tracebacks=True, markup=True)
+ rich_formatter = logging.Formatter(
+ "[bold green]\[%(name)s][/bold green] %(message)s",
+ datefmt="[%y-%m-%dT%H:%M:%S]",
+ )
+ rich_handler.setFormatter(rich_formatter)
+ return rich_handler
diff --git a/nwb_linkml/src/nwb_linkml/maps/__init__.py b/nwb_linkml/src/nwb_linkml/maps/__init__.py
index cb7f329..8b01447 100644
--- a/nwb_linkml/src/nwb_linkml/maps/__init__.py
+++ b/nwb_linkml/src/nwb_linkml/maps/__init__.py
@@ -2,7 +2,7 @@
Mapping from one domain to another
"""
-from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np, flat_to_nptyping
+from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np
from nwb_linkml.maps.map import Map
from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP
@@ -14,5 +14,4 @@ __all__ = [
"Map",
"flat_to_linkml",
"flat_to_np",
- "flat_to_nptyping",
]
diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py
index 9a7756f..d618dbe 100644
--- a/nwb_linkml/src/nwb_linkml/maps/dtype.py
+++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py
@@ -3,11 +3,12 @@ Dtype mappings
"""
from datetime import datetime
-from typing import Any, Type
+from typing import Any
-import nptyping
import numpy as np
+from nwb_schema_language import CompoundDtype, DTypeType, FlatDtype, ReferenceDtype
+
flat_to_linkml = {
"float": "float",
"float32": "float",
@@ -38,37 +39,6 @@ flat_to_linkml = {
Map between the flat data types and the simpler linkml base types
"""
-flat_to_nptyping = {
- "float": "Float",
- "float32": "Float32",
- "double": "Double",
- "float64": "Float64",
- "long": "LongLong",
- "int64": "Int64",
- "int": "Int",
- "int32": "Int32",
- "int16": "Int16",
- "short": "Short",
- "int8": "Int8",
- "uint": "UInt",
- "uint32": "UInt32",
- "uint16": "UInt16",
- "uint8": "UInt8",
- "uint64": "UInt64",
- "numeric": "Number",
- "text": "String",
- "utf": "Unicode",
- "utf8": "Unicode",
- "utf_8": "Unicode",
- "string": "Unicode",
- "str": "Unicode",
- "ascii": "String",
- "bool": "Bool",
- "isodatetime": "Datetime64",
- "AnyType": "Any",
- "object": "Object",
-}
-
flat_to_np = {
"float": float,
"float32": np.float32,
@@ -130,10 +100,9 @@ np_to_python = {
np.float64,
np.single,
np.double,
- np.float_,
)
},
- **{n: str for n in (np.character, np.str_, np.string_, np.unicode_)},
+ **{n: str for n in (np.character, np.str_)},
}
allowed_precisions = {
@@ -173,15 +142,32 @@ https://github.com/hdmf-dev/hdmf/blob/ddc842b5c81d96e0b957b96e88533b16c137e206/s
"""
-def struct_from_dtype(dtype: np.dtype) -> Type[nptyping.Structure]:
+def handle_dtype(dtype: DTypeType | None) -> str:
"""
- Create a nptyping Structure from a compound numpy dtype
+ Get the string form of a dtype
- nptyping structures have the form::
-
- Structure["name: Str, age: Int"]
+ Args:
+ dtype (:class:`.DTypeType`): Dtype to stringify
+ Returns:
+ str
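+
+    A sketch (assuming ``FlatDtype`` is the enum from ``nwb_schema_language``)::
+
+        handle_dtype(FlatDtype.int32)  # -> "int32"
+        handle_dtype(None)             # -> "AnyType"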
"""
- struct_pieces = [f"{k}: {flat_to_nptyping[v[0].name]}" for k, v in dtype.fields.items()]
- struct_dtype = ", ".join(struct_pieces)
- return nptyping.Structure[struct_dtype]
+ if isinstance(dtype, ReferenceDtype):
+ return dtype.target_type
+ elif dtype is None or dtype == []:
+ # Some ill-defined datasets are "abstract" despite that not being in the schema language
+ return "AnyType"
+ elif isinstance(dtype, FlatDtype):
+ return dtype.value
+ elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype):
+ # there is precisely one class that uses compound dtypes:
+ # TimeSeriesReferenceVectorData
+        # compound dtypes are able to define a ragged table according to the schema,
+        # but are used in this single case equivalently to attributes.
+        # so we'll treat them as slots for now.
+        # TODO: handle compound dtypes generally
+ return "AnyType"
+
+ else:
+ # flat dtype
+ return dtype
diff --git a/nwb_linkml/src/nwb_linkml/maps/hdf5.py b/nwb_linkml/src/nwb_linkml/maps/hdf5.py
index 8ebfd85..a7b052f 100644
--- a/nwb_linkml/src/nwb_linkml/maps/hdf5.py
+++ b/nwb_linkml/src/nwb_linkml/maps/hdf5.py
@@ -23,7 +23,6 @@ from pydantic import BaseModel, ConfigDict, Field
from nwb_linkml.annotations import unwrap_optional
from nwb_linkml.maps import Map
-from nwb_linkml.maps.hdmf import dynamictable_to_model
from nwb_linkml.types.hdf5 import HDF5_Path
if sys.version_info.minor >= 11:
@@ -234,63 +233,64 @@ class PruneEmpty(HDF5Map):
return H5ReadResult.model_construct(path=src.path, source=src, completed=True)
-class ResolveDynamicTable(HDF5Map):
- """
- Handle loading a dynamic table!
-
- Dynamic tables are sort of odd in that their models don't include their fields
- (except as a list of strings in ``colnames`` ),
- so we need to create a new model that includes fields for each column,
- and then we include the datasets as :class:`~numpydantic.interface.hdf5.H5ArrayPath`
- objects which lazy load the arrays in a thread/process safe way.
-
- This map also resolves the child elements,
- indicating so by the ``completes`` field in the :class:`.ReadResult`
- """
-
- phase = ReadPhases.read
- priority = 1
-
- @classmethod
- def check(
- cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
- ) -> bool:
- if src.h5_type == "dataset":
- return False
- if "neurodata_type" in src.attrs:
- if src.attrs["neurodata_type"] == "DynamicTable":
- return True
- # otherwise, see if it's a subclass
- model = provider.get_class(src.attrs["namespace"], src.attrs["neurodata_type"])
- # just inspect the MRO as strings rather than trying to check subclasses because
- # we might replace DynamicTable in the future, and there isn't a stable DynamicTable
- # class to inherit from anyway because of the whole multiple versions thing
- parents = [parent.__name__ for parent in model.__mro__]
- return "DynamicTable" in parents
- else:
- return False
-
- @classmethod
- def apply(
- cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
- ) -> H5ReadResult:
- with h5py.File(src.h5f_path, "r") as h5f:
- obj = h5f.get(src.path)
-
- # make a populated model :)
- base_model = provider.get_class(src.namespace, src.neurodata_type)
- model = dynamictable_to_model(obj, base=base_model)
-
- completes = [HDF5_Path(child.name) for child in obj.values()]
-
- return H5ReadResult(
- path=src.path,
- source=src,
- result=model,
- completes=completes,
- completed=True,
- applied=["ResolveDynamicTable"],
- )
+#
+# class ResolveDynamicTable(HDF5Map):
+# """
+# Handle loading a dynamic table!
+#
+# Dynamic tables are sort of odd in that their models don't include their fields
+# (except as a list of strings in ``colnames`` ),
+# so we need to create a new model that includes fields for each column,
+# and then we include the datasets as :class:`~numpydantic.interface.hdf5.H5ArrayPath`
+# objects which lazy load the arrays in a thread/process safe way.
+#
+# This map also resolves the child elements,
+# indicating so by the ``completes`` field in the :class:`.ReadResult`
+# """
+#
+# phase = ReadPhases.read
+# priority = 1
+#
+# @classmethod
+# def check(
+# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
+# ) -> bool:
+# if src.h5_type == "dataset":
+# return False
+# if "neurodata_type" in src.attrs:
+# if src.attrs["neurodata_type"] == "DynamicTable":
+# return True
+# # otherwise, see if it's a subclass
+# model = provider.get_class(src.attrs["namespace"], src.attrs["neurodata_type"])
+# # just inspect the MRO as strings rather than trying to check subclasses because
+# # we might replace DynamicTable in the future, and there isn't a stable DynamicTable
+# # class to inherit from anyway because of the whole multiple versions thing
+# parents = [parent.__name__ for parent in model.__mro__]
+# return "DynamicTable" in parents
+# else:
+# return False
+#
+# @classmethod
+# def apply(
+# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
+# ) -> H5ReadResult:
+# with h5py.File(src.h5f_path, "r") as h5f:
+# obj = h5f.get(src.path)
+#
+# # make a populated model :)
+# base_model = provider.get_class(src.namespace, src.neurodata_type)
+# model = dynamictable_to_model(obj, base=base_model)
+#
+# completes = [HDF5_Path(child.name) for child in obj.values()]
+#
+# return H5ReadResult(
+# path=src.path,
+# source=src,
+# result=model,
+# completes=completes,
+# completed=True,
+# applied=["ResolveDynamicTable"],
+# )
class ResolveModelGroup(HDF5Map):
diff --git a/nwb_linkml/src/nwb_linkml/maps/hdmf.py b/nwb_linkml/src/nwb_linkml/maps/hdmf.py
deleted file mode 100644
index b2c552d..0000000
--- a/nwb_linkml/src/nwb_linkml/maps/hdmf.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-Mapping functions for handling HDMF classes like DynamicTables
-"""
-
-from typing import Any, List, Optional, Type
-
-import dask.array as da
-import h5py
-import numpy as np
-from numpydantic import NDArray
-from numpydantic.interface.hdf5 import H5ArrayPath
-from pydantic import BaseModel, create_model
-
-from nwb_linkml.maps.dtype import struct_from_dtype
-from nwb_linkml.types.hdf5 import HDF5_Path
-
-
-def model_from_dynamictable(group: h5py.Group, base: Optional[BaseModel] = None) -> Type[BaseModel]:
- """
- Create a pydantic model from a dynamic table
- """
- colnames = group.attrs["colnames"]
- types = {}
- for col in colnames:
-
- nptype = group[col].dtype
- nptype = struct_from_dtype(nptype) if nptype.type == np.void else nptype.type
-
- type_ = Optional[NDArray[Any, nptype]]
-
- # FIXME: handling nested column types that appear only in some versions?
- # types[col] = (List[type_ | None], ...)
- types[col] = (type_, None)
-
- model = create_model(group.name.split("/")[-1], **types, __base__=base)
- return model
-
-
-def dynamictable_to_model(
- group: h5py.Group,
- model: Optional[Type[BaseModel]] = None,
- base: Optional[Type[BaseModel]] = None,
-) -> BaseModel:
- """
- Instantiate a dynamictable model
-
- Calls :func:`.model_from_dynamictable` if ``model`` is not provided.
- """
- if model is None:
- model = model_from_dynamictable(group, base)
-
- items = {}
- for col, col_type in model.model_fields.items():
- if col not in group:
- if col in group.attrs:
- items[col] = group.attrs[col]
- continue
-
- if col_type.annotation is HDF5_Path:
- items[col] = [HDF5_Path(group[d].name) for d in group[col][:]]
- else:
- try:
- items[col] = da.from_array(group[col])
- except NotImplementedError:
- items[col] = H5ArrayPath(file=group.file.filename, path=group[col].name)
-
- return model.model_construct(hdf5_path=group.name, name=group.name.split("/")[-1], **items)
-
-
-def dereference_reference_vector(dset: h5py.Dataset, data: Optional[List[Any]]) -> List:
- """
- Given a compound dataset with indices, counts, and object references, dereference to values
-
- Data is of the form
- (idx_start, count, target)
- """
- # assume all these references are to the same target
- # and the index is in the 3rd position
- if data is None:
- data = dset[:]
-
- target = dset.parent.get(data[0][-1])
- res = [target[d[0] : d[0] + d[1]] for d in data]
- return res
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
index 4bc6c35..6cb19f8 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
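+    # a sketch of the passthrough: ``TimeSeries(...)[0:5]`` returns
+    # ``timeseries.data[0:5]``, since TimeSeries stores its array in ``data``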
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -83,15 +92,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -130,10 +139,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -143,12 +157,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,19 +191,21 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -212,11 +228,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -241,7 +261,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -260,9 +280,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py
index e574c6b..095dec1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py
index 1d43d1b..0456ec3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
index 74b8d44..c946ad9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_2_0.core_nwb_device import Device
from ...core.v2_2_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -108,37 +118,47 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +187,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -179,24 +199,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +262,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +277,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -256,7 +286,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -277,16 +312,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -298,7 +342,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -313,7 +357,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -328,7 +372,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -344,14 +388,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -367,9 +420,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +453,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -397,7 +462,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -406,6 +471,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -424,17 +498,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py
index d802c37..ada000f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py
@@ -15,9 +15,9 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
-from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorIndex, VectorData
from ...core.v2_2_0.core_nwb_base import TimeSeries
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "2.2.0"
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -173,20 +190,22 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
- )
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
# Model rebuild
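`idx_start` and `count` on `TimeIntervalsTimeseries` are now one-dimensional arrays with one entry per row, and each pair selects a contiguous window into the referenced `TimeSeries` rather than enumerating end indices. A plain-numpy illustration of that selection convention (stand-in data, not generated code):

```python
import numpy as np

data = np.arange(100.0)         # stand-in for the referenced TimeSeries.data
idx_start = np.array([10, 40])  # first sample of each epoch
count = np.array([25, 5])       # number of samples in each epoch

# Row i selects data[idx_start[i] : idx_start[i] + count[i]]
windows = [data[s : s + c] for s, c in zip(idx_start, count)]
assert windows[0].shape == (25,) and windows[1].shape == (5,)
```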
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py
index cf1adbd..548cc3b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_2_0.core_nwb_epoch import TimeIntervals
from ...core.v2_2_0.core_nwb_misc import Units
from ...core.v2_2_0.core_nwb_device import Device
from ...core.v2_2_0.core_nwb_ogen import OptogeneticStimulusSite
@@ -22,6 +21,7 @@ from ...core.v2_2_0.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorData, VectorIndex
from ...core.v2_2_0.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_2_0.core_nwb_epoch import TimeIntervals
metamodel_version = "None"
version = "2.2.0"
@@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -98,11 +107,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.1.0"] = Field(
+ "2.1.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.1.0", "ifabsent": "string(2.1.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -116,11 +126,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
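Swapping `np.datetime64` for `datetime` lets pydantic parse the ISO 8601 strings the spec mandates while preserving the timezone offset. A minimal sketch with a hypothetical standalone model:

```python
from datetime import datetime, timedelta
from pydantic import BaseModel

class Session(BaseModel):
    # Hypothetical field mirroring NWBFile.session_start_time's new type.
    session_start_time: datetime

s = Session(session_start_time="2018-09-28T14:43:54.123+02:00")
assert s.session_start_time.utcoffset() == timedelta(hours=2)
```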
@@ -158,19 +168,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -315,7 +315,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
@@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -357,7 +357,7 @@ class Subject(NWBContainer):
)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@@ -430,7 +430,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@@ -451,7 +451,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -487,7 +487,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -496,14 +496,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -516,7 +514,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -544,13 +542,43 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
+GeneralSourceScript.model_rebuild()
Subject.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
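`NWBFile.intervals` changes from a flat `List[TimeIntervals]` to the dedicated `NWBFileIntervals` container above, so the named subgroups become first-class attributes instead of entries to search by name. Roughly, with attribute names taken from the new class and construction of `nwbfile` elided:

```python
# Before: intervals was Optional[List[TimeIntervals]], so callers had to
# filter by name:
#     epochs = next(ti for ti in nwbfile.intervals if ti.name == "epochs")
#
# After: the named tables are fields on NWBFileIntervals:
#     epochs = nwbfile.intervals.epochs
#     trials = nwbfile.intervals.trials
#     extras = nwbfile.intervals.time_intervals  # any additional tables
```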
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py
index 7eb4679..86cbcb4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_2_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_0.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
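Fixed units throughout this module become `Literal` types with matching defaults, so the unit is populated when absent and a wrong unit now fails validation instead of passing silently. A sketch of the effect with a hypothetical standalone model:

```python
from typing import Literal
from pydantic import BaseModel, ValidationError

class ClampData(BaseModel):
    # Mirrors the generated pattern: default applied when absent,
    # anything but the fixed string rejected.
    unit: Literal["volts"] = "volts"

assert ClampData().unit == "volts"      # the ifabsent default
try:
    ClampData(unit="millivolts")
except ValidationError:
    print("non-fixed unit rejected")
```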
@@ -255,39 +294,51 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -316,31 +367,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -369,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -408,31 +476,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -461,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -484,11 +569,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -507,11 +595,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_slow, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -530,11 +621,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -553,11 +645,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -576,11 +671,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -599,11 +697,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -622,11 +723,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -640,31 +742,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -693,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -726,6 +843,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
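The new `device`/`electrode` link slots are typed `Union[Model, str]`: a link may arrive as the resolved object or remain a string reference (e.g. an HDF5 path) to resolve later. A minimal sketch with hypothetical stand-in models:

```python
from typing import Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class IntracellularElectrode(BaseModel):
    name: str
    device: Union[Device, str]  # resolved object or reference string

resolved = IntracellularElectrode(name="ic0", device=Device(name="amp1"))
deferred = IntracellularElectrode(name="ic0", device="/general/devices/amp1")
assert isinstance(deferred.device, str)  # left unresolved until linked
```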
@@ -738,7 +864,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -754,17 +880,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
index fa26d72..28ff7b4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +80,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +103,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +126,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +151,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +168,26 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +218,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +237,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +265,26 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,13 +312,12 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
orientation: Optional[str] = Field(
@@ -299,11 +326,11 @@ class OpticalSeries(ImageSeries):
)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -316,21 +343,26 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,26 +390,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py
index 58ceb2e..0e16f74 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,21 +312,35 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -330,11 +369,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -361,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -370,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,24 +424,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -428,7 +466,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -437,61 +480,80 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -517,13 +579,11 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
# Model rebuild
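
The misc module collects the other recurring renames: the anonymous `array` slot becomes `value`, `DynamicTable` columns are wrapped in `VectorData[...]`, and the `__getitem__` added to `ConfiguredBaseModel` at the top of every module delegates indexing to whichever of `value` or `data` is populated. A condensed, runnable sketch of that indexing behavior; the logic is copied from the method in the diff, but `Wrapper` is a stand-in rather than a generated model:

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel


class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        # same logic as the method added in each generated module
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


class Wrapper(ConfiguredBaseModel):
    # stands in for any model whose `array` slot was renamed to `value`
    value: Optional[List[float]] = None


w = Wrapper(value=[1.0, 2.0, 3.0])
assert w[0] == 1.0
assert w[0:2] == [1.0, 2.0]
```
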
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py
index cd3c8ac..2bbed5f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_2_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_0.core_nwb_device import Device
metamodel_version = "None"
version = "2.2.0"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
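
`core_nwb_ogen` also shows how links are now materialized: slots such as `site` and `device` are typed `Union[Target, str]` and tagged with a `source_type: link` annotation, so they accept either the resolved model or a string reference to it. A minimal sketch of the pattern with hypothetical stand-in classes:

```python
from typing import Union

from pydantic import BaseModel


class Device(BaseModel):
    name: str


class StimulusSite(BaseModel):
    # link slot: the resolved target object or a string reference to it,
    # mirroring `any_of: [{"range": "Device"}, {"range": "string"}]`
    device: Union[Device, str]


StimulusSite(device=Device(name="laser0"))      # resolved object
StimulusSite(device="/general/devices/laser0")  # string reference
```
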
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py
index 9ae5919..5321376 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
BeforeValidator,
)
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTableRegion, DynamicTable
+from ...core.v2_2_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
from ...core.v2_2_0.core_nwb_base import (
TimeSeriesStartingTime,
@@ -44,6 +45,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -69,7 +79,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -109,24 +119,30 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
- Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height"], np.float32],
- ]
+ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -139,21 +155,26 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,31 +203,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -233,7 +263,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -248,7 +278,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -263,7 +293,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
@@ -280,8 +310,8 @@ class ImagingPlane(NWBContainer):
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
- imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@@ -306,6 +336,15 @@ class ImagingPlane(NWBContainer):
optical_channel: OpticalChannel = Field(
..., description="""An optical channel used to record from an imaging plane."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ImagingPlaneManifold(ConfiguredBaseModel):
@@ -321,18 +360,20 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* height, * width, 3 x_y_z"], np.float32],
- NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32],
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
]
] = Field(None)
@@ -350,10 +391,12 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
},
)
- unit: Optional[str] = Field(
- None, description="""Measurement units for origin_coords. The default value is 'meters'."""
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -381,10 +424,12 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
},
)
- unit: Optional[str] = Field(
- None, description="""Measurement units for grid_spacing. The default value is 'meters'."""
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -408,9 +453,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -422,7 +465,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[NWBDataInterface]] = Field(
+ value: Optional[List[NWBDataInterface]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}}
)
name: str = Field(...)
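
A quieter rename runs through the ophys containers: `DfOverF`, `Fluorescence`, `ImageSegmentation`, and `MotionCorrection` now hold their contents under `value` rather than `children`, which keeps them consistent with the `__getitem__` convention sketched earlier. A hypothetical stand-in showing the shape of the change:

```python
from typing import List, Optional

from pydantic import BaseModel, Field


class RoiResponseSeries(BaseModel):
    name: str


class Fluorescence(BaseModel):
    # was `children`; with the rename, ConfiguredBaseModel.__getitem__
    # can index straight into the contained series
    value: Optional[List[RoiResponseSeries]] = None
    name: str = Field(...)


f = Fluorescence(name="Fluorescence", value=[RoiResponseSeries(name="roi0")])
assert f.value[0].name == "roi0"
```
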
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
index ee2356f..3ee80c2 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,14 +105,12 @@ class RetinotopyMap(NWBData):
)
name: str = Field(...)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -121,22 +128,18 @@ class AxisMap(RetinotopyMap):
)
name: str = Field(...)
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
class RetinotopyImage(GrayscaleImage):
@@ -149,29 +152,25 @@ class RetinotopyImage(GrayscaleImage):
)
name: str = Field(...)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- resolution: Optional[np.float32] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -193,35 +192,60 @@ class ImagingRetinotopy(NWBDataInterface):
...,
description="""Phase response to stimulus on the first measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_1_power_map: Named[Optional[AxisMap]] = Field(
None,
description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_2_phase_map: Named[AxisMap] = Field(
...,
description="""Phase response to stimulus on the second measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_2_power_map: Named[Optional[AxisMap]] = Field(
None,
description="""Power response to stimulus on the second measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
sign_map: Named[RetinotopyMap] = Field(
...,
description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_descriptions: NDArray[Shape["2 num_axes"], str] = Field(
@@ -241,7 +265,12 @@ class ImagingRetinotopy(NWBDataInterface):
...,
description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -262,32 +291,26 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
}
},
)
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- resolution: Optional[np.float32] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
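
The retinotopy hunks repeat the `Named[...]` pattern on every fixed-name slot, adding a `source_type: neurodata_type_inc` annotation beside the existing `named` tag. `Named` is an `Annotated` alias over a `BeforeValidator` that runs `_get_name` (shown in the diff) to stamp the referring slot's name onto the child object. A condensed sketch of that plumbing, simplified from the generated modules:

```python
from typing import Annotated, Optional, TypeVar

from pydantic import BaseModel, BeforeValidator, Field, ValidationInfo


def _get_name(item, info: ValidationInfo):
    """Stamp the name of the referring slot onto the child object."""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    name = info.field_name
    if isinstance(item, BaseModel):
        item.name = name
    else:
        item["name"] = name
    return item


T = TypeVar("T")
Named = Annotated[T, BeforeValidator(_get_name)]


class AxisMap(BaseModel):
    name: Optional[str] = None


class ImagingRetinotopy(BaseModel):
    axis_1_phase_map: Named[AxisMap] = Field(...)


retino = ImagingRetinotopy(axis_1_phase_map=AxisMap())
assert retino.axis_1_phase_map.name == "axis_1_phase_map"
```
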
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py
index e2d169d..e8892b9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py
@@ -128,11 +128,12 @@ from ...core.v2_2_0.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
+ GeneralSourceScript,
Subject,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
)
from ...core.v2_2_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries
@@ -154,6 +155,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
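
The namespace re-exports only shift to track renamed classes: nested-group models generated from `NWBFile` drop part of their parent-path prefix (`NWBFileGeneralSourceScript` becomes `GeneralSourceScript`, and so on), and `NWBFileIntervals` is newly exported. The exact truncation rule is inferred from the names in the diff; the import below just restates it:

```python
# after this change, the flattened names are imported from core_nwb_file
from nwb_linkml.models.pydantic.core.v2_2_0.core_nwb_file import (
    GeneralSourceScript,           # was NWBFileGeneralSourceScript
    ExtracellularEphysElectrodes,  # was NWBFileGeneralExtracellularEphysElectrodes
    NWBFileIntervals,              # newly exported
)
```
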
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py
index df1e7e7..45c2131 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -83,15 +92,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -130,10 +139,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -143,12 +157,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,19 +191,21 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -212,11 +228,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -241,7 +261,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -260,9 +280,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
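The core.nwb.base changes above follow a pattern repeated throughout these generated models: spec defaults that were previously dropped (`conversion`, `resolution`) are now materialized as pydantic defaults with matching `ifabsent` metadata, attributes the NWB spec requires (`unit`) become required fields, fixed-value attributes become `Literal`s, and the dataset payload slot is renamed from `array` to `value`. A minimal sketch of the resulting behavior, using illustrative stand-in classes rather than the generated models:

```python
# Minimal sketch (stand-in classes, not the generated models) of the new
# default / required / Literal field pattern shown in the diff above.
from typing import Literal, Optional
from pydantic import BaseModel, Field

class TimeSeriesDataSketch(BaseModel):
    conversion: Optional[float] = Field(1.0)   # was Optional[np.float32] = None
    resolution: Optional[float] = Field(-1.0)  # spec default meaning "unknown"
    unit: str = Field(...)                     # now required, matching the spec

class StartingTimeSketch(BaseModel):
    rate: float = Field(...)                     # sampling rate, now required
    unit: Literal["seconds"] = Field("seconds")  # fixed value -> Literal
    value: float = Field(...)

d = TimeSeriesDataSketch(unit="volts")
assert d.conversion == 1.0 and d.resolution == -1.0
t = StartingTimeSketch(rate=30.0, value=0.0)
assert t.unit == "seconds"
```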
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py
index 143813d..04c20b4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
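Two things recur through the behavior (and following) modules: container group slots are renamed from `children` to `value`, and every `ConfiguredBaseModel` gains a `__getitem__` that delegates indexing to that `value` slot or to a `data` field. A self-contained sketch of the passthrough, with the method copied from the diff and `ContainerSketch` as an illustrative stand-in:

```python
# Sketch of the __getitem__ passthrough added to ConfiguredBaseModel;
# ContainerSketch stands in for any generated container class.
from typing import Any, List, Optional, Union
from pydantic import BaseModel, Field

class ContainerSketch(BaseModel):
    value: Optional[List[str]] = Field(None)

    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try to get a value from value or "data" if we have it"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

c = ContainerSketch(value=["a", "b"])
assert c[0] == "a" and c[0:2] == ["a", "b"]  # resolves through .value
```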
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py
index 83d2f3c..bc309fc 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
index c098784..0a13c81 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_2_1.core_nwb_device import Device
from ...core.v2_2_1.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -108,37 +118,47 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +187,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -179,24 +199,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +262,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +277,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -256,7 +286,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -277,16 +312,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -298,7 +342,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -313,7 +357,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -328,7 +372,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -344,14 +388,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -367,9 +420,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +453,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -397,7 +462,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -406,6 +471,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -424,17 +498,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
index ad01523..92fd747 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
@@ -15,9 +15,9 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
-from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorIndex, VectorData
from ...core.v2_2_1.core_nwb_base import TimeSeries
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "2.2.1"
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -173,20 +190,22 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
- )
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
# Model rebuild
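The epoch changes above switch `DynamicTable` columns from bare `NDArray` annotations to a parametrized `VectorData[...]`, keeping column metadata (name, required description) alongside the array payload. A rough sketch of that generic-container pattern, assuming a pydantic generic model in place of the real `hdmf_common_table.VectorData`:

```python
# Rough sketch of the VectorData[...] generic column pattern; VectorDataSketch
# is an assumed stand-in, not the hdmf_common_table implementation.
from typing import Generic, List, Optional, TypeVar
from pydantic import BaseModel

T = TypeVar("T")

class VectorDataSketch(BaseModel, Generic[T]):
    name: str
    description: str            # now required, matching the diff
    value: Optional[T] = None   # array payload lives in the "value" slot

start_time = VectorDataSketch[List[float]](
    name="start_time",
    description="Start time of epoch, in seconds.",
    value=[0.0, 1.5, 3.0],
)
assert start_time.value[1] == 1.5
```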
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
index 3faf47c..06fdd52 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_2_1.core_nwb_epoch import TimeIntervals
from ...core.v2_2_1.core_nwb_misc import Units
from ...core.v2_2_1.core_nwb_device import Device
from ...core.v2_2_1.core_nwb_ogen import OptogeneticStimulusSite
@@ -22,6 +21,7 @@ from ...core.v2_2_1.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorData, VectorIndex
from ...core.v2_2_1.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_2_1.core_nwb_epoch import TimeIntervals
metamodel_version = "None"
version = "2.2.1"
@@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -98,11 +107,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.2.1"] = Field(
+ "2.2.1",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.2.1", "ifabsent": "string(2.2.1)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -116,11 +126,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -158,19 +168,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -315,7 +315,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
@@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -357,7 +357,7 @@ class Subject(NWBContainer):
)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@@ -430,7 +430,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@@ -451,7 +451,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -487,7 +487,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -496,14 +496,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -516,7 +514,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -544,13 +542,43 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
+GeneralSourceScript.model_rebuild()
Subject.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
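With the change above, `NWBFile.intervals` is no longer a flat `List[TimeIntervals]` with repeated `any_of` ranges but a dedicated `NWBFileIntervals` group exposing named `epochs`/`trials`/`invalid_times` children plus a catch-all list. A minimal sketch of the resulting access pattern (stand-in classes, not the generated ones):

```python
# Minimal sketch (stand-in classes) of the new NWBFile.intervals group.
from typing import List, Optional
from pydantic import BaseModel

class TimeIntervalsSketch(BaseModel):
    name: str

class NWBFileIntervalsSketch(BaseModel):
    epochs: Optional[TimeIntervalsSketch] = None
    trials: Optional[TimeIntervalsSketch] = None
    invalid_times: Optional[TimeIntervalsSketch] = None
    time_intervals: Optional[List[TimeIntervalsSketch]] = None  # catch-all

intervals = NWBFileIntervalsSketch(trials=TimeIntervalsSketch(name="trials"))
assert intervals.trials.name == "trials"  # named child instead of list scan
```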
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
index 3b96bf5..d004723 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_2_1.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_1.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -255,39 +294,51 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -316,31 +367,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -369,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -408,31 +476,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -461,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -484,11 +569,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -507,11 +595,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -530,11 +621,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -553,11 +645,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -576,11 +671,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -599,11 +697,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -622,11 +723,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -640,31 +742,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -693,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -726,6 +843,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -738,7 +864,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -754,17 +880,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
index f7b0d84..3322ff3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +80,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +103,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +126,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +151,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +168,26 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +218,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +237,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +265,26 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,13 +312,12 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
orientation: Optional[str] = Field(
@@ -299,11 +326,11 @@ class OpticalSeries(ImageSeries):
)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -316,21 +343,26 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,26 +390,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
index 16f9bea..cf92403 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,21 +312,35 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -330,11 +369,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -361,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -370,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,24 +424,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -428,7 +466,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -437,61 +480,80 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -517,13 +579,11 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
# Model rebuild
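These misc hunks lean on the `Named[...]` slots whose `_get_name` validator (shown above) copies the parent field's name onto the child object. A hedged sketch of the mechanism, assuming `Named` is an `Annotated` alias around `BeforeValidator(_get_name)` as the imports in these modules suggest; the `VectorIndex` stub and `Units` class below are illustrative only:

```python
from typing import Annotated, Optional, TypeVar, Union

from pydantic import BaseModel, BeforeValidator, ValidationInfo

T = TypeVar("T")


def _get_name(item: Union[BaseModel, dict], info: ValidationInfo):
    """Get the name of the slot that refers to this object"""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item


Named = Annotated[T, BeforeValidator(_get_name)]


class VectorIndex(BaseModel):  # illustrative stub
    name: Optional[str] = None


class Units(BaseModel):
    spike_times_index: Named[Optional[VectorIndex]] = None


u = Units(spike_times_index=VectorIndex())
assert u.spike_times_index.name == "spike_times_index"
```
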
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
index 7a99546..07d8693 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_2_1.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_1.core_nwb_device import Device
metamodel_version = "None"
version = "2.2.1"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
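As in the icephys models, these ogen hunks replace implicit references with explicit link slots (`site`, `device`) typed `Union[<target>, str]`. A short sketch of how such a union validates under stock pydantic v2; `Stimulus` is a hypothetical container, not a generated class:

```python
from typing import Union

from pydantic import BaseModel


class Device(BaseModel):
    name: str


class Stimulus(BaseModel):  # hypothetical container, not a generated model
    # A link slot accepts either the resolved target object or a string
    # reference (e.g. an HDF5 path) to be resolved later.
    device: Union[Device, str]


Stimulus(device=Device(name="laser0"))
Stimulus(device="/general/devices/laser0")
```
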
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
index 6b4e114..587b5ee 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
@@ -17,6 +17,7 @@ from pydantic import (
BeforeValidator,
)
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTableRegion, DynamicTable
+from ...core.v2_2_1.core_nwb_device import Device
from numpydantic import NDArray, Shape
from ...core.v2_2_1.core_nwb_base import (
TimeSeriesStartingTime,
@@ -44,6 +45,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -69,7 +79,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -109,24 +119,30 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
- Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height"], np.float32],
- ]
+ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -139,21 +155,26 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,31 +203,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -233,7 +263,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -248,7 +278,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -263,7 +293,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
@@ -280,8 +310,8 @@ class ImagingPlane(NWBContainer):
name: str = Field(...)
description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
- imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
indicator: str = Field(..., description="""Calcium indicator.""")
location: str = Field(
...,
@@ -306,6 +336,15 @@ class ImagingPlane(NWBContainer):
optical_channel: OpticalChannel = Field(
..., description="""An optical channel used to record from an imaging plane."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ImagingPlaneManifold(ConfiguredBaseModel):
@@ -321,18 +360,20 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* height, * width, 3 x_y_z"], np.float32],
- NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32],
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
]
] = Field(None)
@@ -350,10 +391,12 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
},
)
- unit: Optional[str] = Field(
- None, description="""Measurement units for origin_coords. The default value is 'meters'."""
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -381,10 +424,12 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
},
)
- unit: Optional[str] = Field(
- None, description="""Measurement units for grid_spacing. The default value is 'meters'."""
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -408,9 +453,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -422,7 +465,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[NWBDataInterface]] = Field(
+ value: Optional[List[NWBDataInterface]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}}
)
name: str = Field(...)
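Throughout these models, `NDArray` dtypes move from exact numpy types (`np.float32`, `np.float64`, `np.number`, `np.int32`, `np.uint8`) to Python builtins (`float`, `int`), so validation no longer pins a specific precision. A sketch of the annotation as it now appears, assuming numpydantic treats builtin dtypes as matching any precision in that family (`TimestampsSketch` is an illustrative stand-in):

```python
# Sketch: numpydantic NDArray annotation with a builtin dtype; with `float`
# as dtype, any float precision should validate, not just float64.
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class TimestampsSketch(BaseModel):  # illustrative stand-in
    timestamps: NDArray[Shape["* num_times"], float]

TimestampsSketch(timestamps=np.arange(3, dtype=np.float32))
TimestampsSketch(timestamps=np.arange(3, dtype=np.float64))
```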
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
index 5fe79a1..eef6b41 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,14 +105,12 @@ class RetinotopyMap(NWBData):
)
name: str = Field(...)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -121,22 +128,18 @@ class AxisMap(RetinotopyMap):
)
name: str = Field(...)
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
class RetinotopyImage(GrayscaleImage):
@@ -149,29 +152,25 @@ class RetinotopyImage(GrayscaleImage):
)
name: str = Field(...)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- resolution: Optional[np.float32] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -193,35 +192,60 @@ class ImagingRetinotopy(NWBDataInterface):
...,
description="""Phase response to stimulus on the first measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_1_power_map: Named[Optional[AxisMap]] = Field(
None,
description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_2_phase_map: Named[AxisMap] = Field(
...,
description="""Phase response to stimulus on the second measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_2_power_map: Named[Optional[AxisMap]] = Field(
None,
description="""Power response to stimulus on the second measured axis.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
sign_map: Named[RetinotopyMap] = Field(
...,
description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
axis_descriptions: NDArray[Shape["2 num_axes"], str] = Field(
@@ -241,7 +265,12 @@ class ImagingRetinotopy(NWBDataInterface):
...,
description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -262,32 +291,26 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
}
},
)
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- resolution: Optional[np.float32] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
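The retinotopy hunks above also tighten cardinality: formerly optional scalars become required (`bits_per_pixel`, `format`, `focal_depth`) or become required lists (`dimension`, `field_of_view`), so omitting them now fails validation instead of silently yielding `None`. A sketch of the difference under pydantic v2 (`MapSketch` is an illustrative stand-in):

```python
# Sketch: a slot promoted from Optional[int] to a required List[int] now
# raises a ValidationError when omitted rather than defaulting to None.
from typing import List
from pydantic import BaseModel, Field, ValidationError

class MapSketch(BaseModel):  # illustrative stand-in
    dimension: List[int] = Field(...)

try:
    MapSketch()
except ValidationError as e:
    print(e.errors()[0]["type"])  # "missing" under pydantic v2
```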
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py
index b798833..b5d693b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py
@@ -128,11 +128,12 @@ from ...core.v2_2_1.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
+ GeneralSourceScript,
Subject,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
)
from ...core.v2_2_1.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries
@@ -154,6 +155,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
index 556fa21..4b73640 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -83,15 +92,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -130,10 +139,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -143,12 +157,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,19 +191,21 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -212,11 +228,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -241,7 +261,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -260,9 +280,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
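The base-model hunks materialize the schema's `ifabsent` values as Python-side defaults (`description`, `comments`, `conversion`, `resolution`) and pin fixed-value attributes with `Literal` plus an `equals_string` annotation (`unit` on `TimeSeriesStartingTime`). A sketch of the resulting behavior (`StartingTimeSketch` is an illustrative stand-in):

```python
# Sketch: an ifabsent-style default plus a Literal-pinned unit, mirroring
# the TimeSeriesStartingTime hunk above (illustrative stand-in class).
from typing import Literal
from pydantic import BaseModel, Field, ValidationError

class StartingTimeSketch(BaseModel):  # illustrative stand-in
    rate: float = Field(...)
    unit: Literal["seconds"] = Field("seconds")
    value: float = Field(...)

t = StartingTimeSketch(rate=30.0, value=0.0)
assert t.unit == "seconds"  # default applied when absent
try:
    StartingTimeSketch(rate=30.0, value=0.0, unit="minutes")
except ValidationError:
    print("non-'seconds' unit rejected")
```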
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
index 39a00bd..aa5631c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py
index 8f59409..dd0ab6e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
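The `children` → `value` renames across these files pair with the `__getitem__` passthrough added to every `ConfiguredBaseModel`: because the passthrough checks `value` before `data`, grouped containers become directly indexable. A condensed sketch of the interaction (the generated version uses explicit `hasattr` checks; `PositionSketch` is an illustrative stand-in):

```python
# Sketch: __getitem__ forwards indexing to `value` (or `data`), so the
# renamed container slots can be subscripted directly.
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        if getattr(self, "value", None) is not None:
            return self.value[val]
        elif getattr(self, "data", None) is not None:
            return self.data[val]
        raise KeyError("No value or data field to index from")

class PositionSketch(ConfiguredBaseModel):  # illustrative stand-in
    value: Optional[List[str]] = None

assert PositionSketch(value=["a", "b"])[0] == "a"
```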
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
index 08e515f..402ccda 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_2_2.core_nwb_device import Device
from ...core.v2_2_2.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -108,37 +118,47 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +187,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -179,24 +199,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +262,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +277,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -256,7 +286,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -277,16 +312,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -298,7 +342,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -313,7 +357,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -328,7 +372,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -344,14 +388,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -367,9 +420,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +453,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -397,7 +462,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -406,6 +471,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -424,17 +498,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
index ec6ad30..215cca6 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -173,21 +190,23 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py
index 32fe49a..5d28191 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_2_2.core_nwb_epoch import TimeIntervals
from ...core.v2_2_2.core_nwb_misc import Units
from ...core.v2_2_2.core_nwb_device import Device
from ...core.v2_2_2.core_nwb_ogen import OptogeneticStimulusSite
@@ -22,6 +21,7 @@ from ...core.v2_2_2.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex
from ...core.v2_2_2.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_2_2.core_nwb_epoch import TimeIntervals
metamodel_version = "None"
version = "2.2.2"
@@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -98,11 +107,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.2.2"] = Field(
+ "2.2.2",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.2.2", "ifabsent": "string(2.2.2)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -116,11 +126,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -158,19 +168,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -315,7 +315,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
@@ -335,7 +335,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -357,7 +357,7 @@ class Subject(NWBContainer):
)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@@ -430,7 +430,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@@ -451,7 +451,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -487,7 +487,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -496,14 +496,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -516,7 +514,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -544,13 +542,43 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
+GeneralSourceScript.model_rebuild()
Subject.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
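The `__getitem__` added to `ConfiguredBaseModel` in this file (and repeated in each generated module) just forwards indexing to a populated `value` or `data` field. A rough standalone sketch of the same pattern, with a toy model in place of the generated ones:

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel

class IndexableModel(BaseModel):
    value: Optional[List[float]] = None
    data: Optional[List[float]] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # Same fallback order as the generated models: `value` wins, then `data`.
        if self.value is not None:
            return self.value[val]
        elif self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

m = IndexableModel(data=[0.0, 1.0, 2.0])
assert m[1] == 1.0          # forwarded to m.data[1]
assert m[:2] == [0.0, 1.0]  # slices pass straight through
```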
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
index a7fd4fd..cb14508 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_2_2.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_2.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -255,39 +294,51 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -316,31 +367,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -369,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -408,31 +476,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -461,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -484,11 +569,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -507,11 +595,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -530,11 +621,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -553,11 +645,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -576,11 +671,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -599,11 +697,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -622,11 +723,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -640,31 +742,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -693,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -726,6 +843,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -738,7 +864,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -754,17 +880,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py
index 11c8e94..88c0781 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +80,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +103,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +126,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +151,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +168,26 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +218,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +237,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +265,26 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,24 +312,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -314,21 +341,26 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -356,26 +388,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
index 7694e0c..6e8daf9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,21 +312,35 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -330,11 +369,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -361,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -370,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,24 +424,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -428,7 +466,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -437,61 +480,80 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -517,14 +579,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py
index 9be92dd..e7823a4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_2_2.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_2.core_nwb_device import Device
metamodel_version = "None"
version = "2.2.2"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
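The `__getitem__` added to each `ConfiguredBaseModel` in these hunks gives the generated models index/slice access that passes through to their `value` or `data` field. A minimal sketch of the behavior; `Series` is an illustrative stand-in for a generated class, not part of the models:

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel


class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try to get a value from the "value" or "data" field, if present"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


class Series(ConfiguredBaseModel):
    # Stand-in for a generated class whose payload lives in "data"
    data: Optional[List[float]] = None


assert Series(data=[0.1, 0.2, 0.3])[1] == 0.2
```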
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
index 4a1b85f..88bb254 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
@@ -16,8 +16,9 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
-from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion, DynamicTable
+from ...core.v2_2_2.core_nwb_device import Device
+from numpydantic import NDArray, Shape
from ...core.v2_2_2.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -44,6 +45,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -69,7 +79,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -109,24 +119,30 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
- Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height"], np.float32],
- ]
+ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -139,21 +155,26 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,31 +203,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -233,7 +263,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -248,7 +278,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -263,7 +293,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
@@ -278,10 +308,152 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[NWBContainer]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBContainer"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "x_y", "exact_cardinality": 2},
+ {"alias": "x_y_z", "exact_cardinality": 3},
+ ]
+ }
+ }
+ },
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "x_y", "exact_cardinality": 2},
+ {"alias": "x_y_z", "exact_cardinality": 3},
+ ]
+ }
+ }
+ },
+ )
+
+
+class OpticalChannel(NWBContainer):
+ """
+ An optical channel used to record from an imaging plane.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description or other notes about the channel.""")
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -293,7 +465,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[NWBDataInterface]] = Field(
+ value: Optional[List[NWBDataInterface]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}}
)
name: str = Field(...)
@@ -307,4 +479,8 @@ DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
+OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
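The required link slots added in this file (`imaging_plane` on `TwoPhotonSeries`, `device` on `ImagingPlane`) follow the same `Union[<Target>, str]` pattern as the other `source_type: link` annotations above: a link validates either as the resolved target object or as a string path to it. A minimal sketch of the pattern with illustrative classes (not the generated ones):

```python
from typing import Union

from pydantic import BaseModel


class Device(BaseModel):
    name: str


class Site(BaseModel):
    # Link slot: either the resolved object or a string path to it
    device: Union[Device, str]


# Both forms validate
Site(device=Device(name="2p-scope"))
Site(device="/general/devices/2p-scope")
```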
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
index b1c56d8..e623a0b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py
index 395e23e..e9737e4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py
@@ -52,6 +52,10 @@ from ...core.v2_2_2.core_nwb_ophys import (
Fluorescence,
ImageSegmentation,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
+ OpticalChannel,
MotionCorrection,
)
from ...core.v2_2_2.core_nwb_device import Device
@@ -127,11 +131,12 @@ from ...core.v2_2_2.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
+ GeneralSourceScript,
Subject,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
)
from ...core.v2_2_2.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries
@@ -153,6 +158,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
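Across these files, slots whose NWB spec declares a default (`description`, `comments`, `unit`, `conversion`, `resolution`, ...) now carry that default both as the pydantic field default and as an `ifabsent` marker in `json_schema_extra`. A minimal sketch of the rendered pattern; `TimeSeriesLike` is illustrative:

```python
from typing import Optional

from pydantic import BaseModel, Field


class TimeSeriesLike(BaseModel):
    # "ifabsent: string(no comments)" becomes both the runtime default and a
    # linkml_meta marker, so the default survives a round trip to the schema
    comments: Optional[str] = Field(
        "no comments",
        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
    )


assert TimeSeriesLike().comments == "no comments"
```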
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py
index 0cec2c7..1d0f436 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -83,15 +92,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -130,10 +139,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -143,12 +157,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,19 +191,21 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -212,11 +228,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -241,7 +261,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -260,9 +280,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
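
A note on the recurring pattern above: LinkML `ifabsent` defaults now become real Python defaults ("no description", "no comments", 1.0, -1.0), and fixed-value attributes become `Literal` fields. A minimal sketch of the effect, not the generated module itself (the `*Sketch` name is hypothetical):

```python
from typing import Literal
from pydantic import BaseModel, Field, ValidationError

class StartingTimeSketch(BaseModel):
    # `ifabsent: string(starting_time)` becomes a concrete default
    name: Literal["starting_time"] = Field(
        "starting_time",
        json_schema_extra={"linkml_meta": {"ifabsent": "string(starting_time)"}},
    )
    rate: float = Field(..., description="Sampling rate, in Hz.")
    # `equals_string: seconds` becomes a Literal pin with a default
    unit: Literal["seconds"] = Field("seconds")

st = StartingTimeSketch(rate=30000.0)
assert st.unit == "seconds"  # fixed value filled in automatically
try:
    StartingTimeSketch(rate=1.0, unit="minutes")
except ValidationError:
    pass  # Literal["seconds"] rejects any other unit
```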
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py
index a3f0972..e095079 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
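
The `children` → `value` renames in this file pair with the new `ConfiguredBaseModel.__getitem__` above: container groups keep their contents in `value`, so indexing the container falls through to the contents. A small sketch reusing the same method body as the generated code (the `*Sketch` names are hypothetical):

```python
from typing import Any, List, Optional, Union
from pydantic import BaseModel, Field

class ConfiguredBaseModelSketch(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try to get a value from `value` or `data` if we have it."""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

class PositionSketch(ConfiguredBaseModelSketch):
    value: Optional[List[str]] = Field(None)

pos = PositionSketch(value=["spatial_series_a", "spatial_series_b"])
assert pos[0] == "spatial_series_a"  # the container indexes into its contents
```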
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py
index fc0ff49..ed9623b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
index b34ac7d..e5b05d6 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_2_4.core_nwb_device import Device
from ...core.v2_2_4.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -108,37 +118,47 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +187,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -179,24 +199,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +262,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +277,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -256,7 +286,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -277,16 +312,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -298,7 +342,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -313,7 +357,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -328,7 +372,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -344,14 +388,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -367,9 +420,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +453,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -397,7 +462,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -406,6 +471,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -424,17 +498,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
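
The `source_type` annotations added above decorate `Named[...]` fields, whose `_get_name` validator (shown in these hunks with its new assert message) stamps the field name onto whatever object is assigned. A hedged sketch of how such an annotation can be built; the real `Named` lives in the generated nwb_linkml modules, and the model names here are illustrative:

```python
from typing import Annotated, TypeVar, Union
from pydantic import BaseModel, BeforeValidator, ValidationInfo

class RegionSketch(BaseModel):
    name: str = ""

def _get_name(item: Union[BaseModel, dict], info: ValidationInfo):
    """Copy the field name onto the object assigned to that field."""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item

T = TypeVar("T")
Named = Annotated[T, BeforeValidator(_get_name)]

class SeriesSketch(BaseModel):
    electrodes: Named[RegionSketch]

s = SeriesSketch(electrodes=RegionSketch())
assert s.electrodes.name == "electrodes"  # name inferred from the slot
```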
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
index 1cb1305..e216685 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -173,21 +190,23 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py
index ac4135f..e70c2dc 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_2_4.core_nwb_epoch import TimeIntervals
from ...core.v2_2_4.core_nwb_misc import Units
from ...core.v2_2_4.core_nwb_device import Device
from ...core.v2_2_4.core_nwb_ogen import OptogeneticStimulusSite
@@ -16,6 +15,7 @@ from ...core.v2_2_4.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex
from ...core.v2_2_4.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_2_4.core_nwb_epoch import TimeIntervals
from ...core.v2_2_4.core_nwb_base import (
NWBData,
NWBContainer,
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -96,9 +105,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -114,11 +121,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.2.4"] = Field(
+ "2.2.4",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.2.4", "ifabsent": "string(2.2.4)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -132,11 +140,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -174,19 +182,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -272,7 +270,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -300,10 +298,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -318,7 +316,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -331,11 +329,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -354,12 +352,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -372,7 +370,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -381,7 +379,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -390,7 +388,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -399,7 +397,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@@ -408,7 +406,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -417,7 +415,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@@ -429,7 +427,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -438,7 +436,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -447,7 +445,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -456,7 +454,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -465,7 +463,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -474,14 +472,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -494,7 +490,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -522,6 +518,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -547,7 +572,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -575,9 +600,10 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
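
The `NWBFileIntervals` class introduced above replaces a flat `List[TimeIntervals]` (with four identical `any_of` ranges) by naming each subgroup explicitly, and the `np.datetime64` → `datetime` switch keeps file-level timestamps as stdlib values. A minimal sketch of the resulting shape (the `*Sketch` names are hypothetical):

```python
from typing import List, Optional
from pydantic import BaseModel

class TimeIntervalsSketch(BaseModel):
    name: str

class NWBFileIntervalsSketch(BaseModel):
    epochs: Optional[TimeIntervalsSketch] = None
    trials: Optional[TimeIntervalsSketch] = None
    invalid_times: Optional[TimeIntervalsSketch] = None
    time_intervals: Optional[List[TimeIntervalsSketch]] = None

iv = NWBFileIntervalsSketch(trials=TimeIntervalsSketch(name="trials"))
assert iv.trials.name == "trials" and iv.epochs is None
```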
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
index d7e6f39..98b68dc 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_2_4.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_4.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -255,39 +294,51 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -316,31 +367,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -369,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -408,31 +476,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -461,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -484,11 +569,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -507,11 +595,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -530,11 +621,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -553,11 +645,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -576,11 +671,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -599,11 +697,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -622,11 +723,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -640,31 +742,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -693,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
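
The `unit` slots above are a recurring pattern in this regeneration: a free-text `Optional[str]` becomes a `Literal` pinned to the schema's fixed value, with the `ifabsent` default applied. A minimal sketch of what that buys at validation time (the `ClampData` name is illustrative, not a generated class):

```python
from typing import Literal

from pydantic import BaseModel, ValidationError


class ClampData(BaseModel):
    # Hypothetical stand-in for fields like VoltageClampStimulusSeriesData.unit:
    # the default mirrors the schema's ifabsent value, and any other string
    # now fails validation instead of passing silently.
    unit: Literal["volts"] = "volts"


print(ClampData().unit)  # "volts"
try:
    ClampData(unit="mV")
except ValidationError as e:
    print(e)  # Input should be 'volts'
```
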
@@ -726,6 +843,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -738,7 +864,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -754,17 +880,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py
index 4bd8bd5..40370ff 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +80,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +103,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +126,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +151,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +168,26 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +218,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +237,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +265,26 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,24 +312,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -314,21 +341,26 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -356,26 +388,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
index 4b5b92a..1a7a26a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,21 +312,35 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -330,11 +369,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -361,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -370,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,24 +424,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -428,7 +466,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -437,61 +480,80 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -517,14 +579,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py
index 06238c6..33f8506 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_2_4.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_4.core_nwb_device import Device
metamodel_version = "None"
version = "2.2.4"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
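
The new required link slots (`site` on `OptogeneticSeries`, `device` on `OptogeneticStimulusSite`) follow the same `Union[Model, str]` shape as the icephys links, so a link is satisfied either by the object itself or by a string path to it. A hedged sketch of that shape with toy classes (not the generated ones; the generated fields also carry the `source_type: link` annotation):

```python
from typing import Union

from pydantic import BaseModel


class Device(BaseModel):
    name: str


class StimulusSite(BaseModel):
    # A link slot: accepts either the linked object inline or a string
    # reference (e.g. an HDF5 path) to be resolved later.
    device: Union[Device, str]


inline = StimulusSite(device=Device(name="laser-1"))
by_path = StimulusSite(device="/general/devices/laser-1")
assert isinstance(inline.device, Device)
assert isinstance(by_path.device, str)
```
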
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
index 76d1dae..54b8b6a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_2_4.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_2_4.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_2_4.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_2_4.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_2_4.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.2.4"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,24 +124,30 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
- Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height"], np.float32],
- ]
+ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -144,21 +160,26 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,31 +208,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,7 +268,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -253,7 +283,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -268,7 +298,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -292,7 +322,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -303,7 +338,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -315,14 +355,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -348,10 +395,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -374,13 +419,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -403,14 +458,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -429,10 +498,143 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "x_y", "exact_cardinality": 2},
+ {"alias": "x_y_z", "exact_cardinality": 3},
+ ]
+ }
+ }
+ },
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "x_y", "exact_cardinality": 2},
+ {"alias": "x_y_z", "exact_cardinality": 3},
+ ]
+ }
+ }
+ },
+ )
class OpticalChannel(NWBContainer):
@@ -446,9 +648,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -460,7 +660,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -483,6 +683,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -497,6 +706,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
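The `children` → `value` rename throughout these hunks pairs with the new `ConfiguredBaseModel.__getitem__`: containers that keep their payload in a `value` field (or datasets that keep it in `data`) can now be indexed directly. A trimmed-down reproduction of that behavior, assuming pydantic v2; `Container` is a hypothetical stand-in, and the real base class also sets `model_config`, `object_id`, etc.:

```python
# Trimmed-down sketch of the new base-class indexing; assumes pydantic v2.
from typing import Any, List, Optional, Union

from pydantic import BaseModel


class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Index into the "value" or "data" field, whichever is populated."""
        if getattr(self, "value", None) is not None:
            return self.value[val]
        elif getattr(self, "data", None) is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


class Container(ConfiguredBaseModel):  # hypothetical stand-in
    value: Optional[List[int]] = None


assert Container(value=[10, 20, 30])[1] == 20
```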
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
index b204be8..af820b1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py
index fc74ca5..d4744f0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py
@@ -56,6 +56,9 @@ from ...core.v2_2_4.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -134,10 +137,11 @@ from ...core.v2_2_4.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
)
@@ -161,6 +165,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py
index defb8e9..8c121de 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -83,15 +92,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -130,10 +139,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -143,12 +157,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,19 +191,21 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -212,11 +228,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -241,7 +261,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -260,9 +280,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
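The base hunks above also materialize the schema's `ifabsent` values as concrete pydantic defaults (`"no description"`, `"no comments"`, `1.0`, `-1.0`) instead of `None`, while keeping the `ifabsent` string in `json_schema_extra` for LinkML provenance. A minimal sketch of what that buys, using a hypothetical cut-down field definition:

```python
# Cut-down sketch of ifabsent-backed defaults; assumes pydantic v2.
from typing import Optional

from pydantic import BaseModel, Field


class TimeSeriesSketch(BaseModel):  # hypothetical stand-in
    comments: Optional[str] = Field(
        "no comments",
        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
    )
    resolution: Optional[float] = Field(
        -1.0, json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}
    )


ts = TimeSeriesSketch()
assert ts.comments == "no comments" and ts.resolution == -1.0
```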
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py
index 012e884..6b298ff 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py
index afc24d2..2b32c15 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
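The `Named[...]` annotation used for slots like `electrodes` in the ecephys diff below relies on the `_get_name` `BeforeValidator` shown in these files: it stamps the parent slot's name onto the child object during validation. A simplified, self-contained version of that machinery, assuming pydantic v2 (where a `BeforeValidator` function can receive the `ValidationInfo` carrying the field name); `DynamicTableRegion` and `ElectricalSeries` are hypothetical stand-ins:

```python
# Simplified sketch of the Named/_get_name machinery; assumes pydantic v2.
from typing import Annotated, TypeVar, Union

from pydantic import BaseModel, BeforeValidator, ValidationInfo

T = TypeVar("T")


def _get_name(item: Union[BaseModel, dict], info: ValidationInfo):
    """Stamp the parent slot's name onto the child object."""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item


Named = Annotated[T, BeforeValidator(_get_name)]


class DynamicTableRegion(BaseModel):  # hypothetical stand-in
    name: str = ""


class ElectricalSeries(BaseModel):  # hypothetical stand-in
    electrodes: Named[DynamicTableRegion]


es = ElectricalSeries(electrodes=DynamicTableRegion())
assert es.electrodes.name == "electrodes"
```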
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
index 5f4bd2c..5869306 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_2_5.core_nwb_device import Device
from ...core.v2_2_5.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -108,37 +118,47 @@ class ElectricalSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +187,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -179,24 +199,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +262,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -247,7 +277,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -256,7 +286,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -277,16 +312,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -298,7 +342,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -313,7 +357,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -328,7 +372,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -344,14 +388,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -367,9 +420,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +453,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -397,7 +462,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -406,6 +471,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -424,17 +498,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
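Aside (editorial, not part of the diff): the ecephys hunks above tighten `ElectrodeGroup` (`description` and `location` become required) and materialize HDMF links as `Union[Model, str]` slots annotated with `source_type: link`. A minimal sketch of what that means for callers, assuming the generated v2.2.5 module is importable under the layout shown in the surrounding diff headers:

```python
# Editorial sketch, not part of the diff: import path assumed from the
# nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/ layout above.
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_ecephys import ElectrodeGroup

# `description` and `location` are now required, and the new `device` link
# slot accepts either a Device instance or a string reference, per the
# `any_of: [{"range": "Device"}, {"range": "string"}]` annotation.
group = ElectrodeGroup(
    name="shank0",
    description="silicon probe shank 0",
    location="CA1",
    device="/general/devices/probe0",  # link expressed as a path string
)
```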
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
index e1b3da9..3bee6b5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -173,21 +190,23 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
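Aside (editorial, not part of the diff): every `ConfiguredBaseModel` now gains a `__getitem__` that delegates indexing to a `value` or `data` field when one is present, matching the parallel `array` → `value` slot rename. A self-contained toy illustration (the `Toy` class is hypothetical, not one of the generated models):

```python
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try and get a value from value or "data" if we have it"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

class Toy(ConfiguredBaseModel):
    value: Optional[List[float]] = None

assert Toy(value=[1.0, 2.0, 3.0])[1] == 2.0          # served from `value`
assert Toy(value=[1.0, 2.0, 3.0])[0:2] == [1.0, 2.0]  # slices pass through
```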
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py
index d2a1e8f..5f22cff 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_2_5.core_nwb_epoch import TimeIntervals
from ...core.v2_2_5.core_nwb_misc import Units
from ...core.v2_2_5.core_nwb_device import Device
from ...core.v2_2_5.core_nwb_ogen import OptogeneticStimulusSite
@@ -16,6 +15,7 @@ from ...core.v2_2_5.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex
from ...core.v2_2_5.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_2_5.core_nwb_epoch import TimeIntervals
from ...core.v2_2_5.core_nwb_base import (
NWBData,
NWBContainer,
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -96,9 +105,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -114,11 +121,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.2.5"] = Field(
+ "2.2.5",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.2.5", "ifabsent": "string(2.2.5)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -132,11 +140,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -174,19 +182,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -272,7 +270,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -300,10 +298,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -318,7 +316,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -331,11 +329,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -354,12 +352,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -372,7 +370,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -381,7 +379,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -390,7 +388,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -399,7 +397,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel.""",
json_schema_extra={
@@ -408,7 +406,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -417,7 +415,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering.""",
json_schema_extra={
@@ -429,7 +427,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -438,7 +436,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -447,7 +445,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -456,7 +454,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -465,7 +463,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -474,14 +472,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -494,7 +490,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -522,6 +518,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -547,7 +572,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -575,9 +600,10 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
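Aside (editorial, not part of the diff): in `core_nwb_file.py`, `NWBFile.intervals` becomes the dedicated `NWBFileIntervals` group with named `epochs`/`trials`/`invalid_times` children instead of a flat `List[TimeIntervals]`, and the long `NWBFileGeneral*` class names are shortened. A minimal sketch of the renamed `GeneralSourceScript`, assuming the import path shown in the diff header:

```python
# Editorial sketch, not part of the diff: fields follow the hunks above.
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_file import GeneralSourceScript

# `file_name` is now required rather than Optional; `name` is fixed to
# "source_script" and supplied by the ifabsent default.
src = GeneralSourceScript(file_name="make_nwb.py", value="print('hi')")
assert src.name == "source_script"
```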
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
index 166ecb0..220fc73 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_2_5.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_5.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -255,39 +294,51 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -316,31 +367,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -369,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -408,31 +476,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -461,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -484,11 +569,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -507,11 +595,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -530,11 +621,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -553,11 +645,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -576,11 +671,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -599,11 +697,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -622,11 +723,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -640,31 +742,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -693,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -726,6 +843,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -738,7 +864,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -754,17 +880,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
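Aside (editorial, not part of the diff): throughout `core_nwb_icephys.py`, schema-fixed units move from `Optional[str]` to `Literal` types with `ifabsent` defaults, so a mismatched unit now fails validation at construction time instead of passing silently. A minimal sketch, assuming the import path shown in the diff header:

```python
# Editorial sketch, not part of the diff: fields follow the hunks above.
from pydantic import ValidationError
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_icephys import (
    CurrentClampSeriesData,
)

data = CurrentClampSeriesData(value=[0.1, 0.2])  # unit defaults to "volts"
assert data.unit == "volts"

try:
    CurrentClampSeriesData(unit="amperes", value=[])
except ValidationError:
    pass  # anything other than the fixed "volts" is rejected
```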
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py
index b74228e..483dfd5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +80,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +103,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +126,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +151,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +168,26 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +218,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +237,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +265,26 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,24 +312,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -314,21 +341,26 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -356,26 +388,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
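
The `__getitem__` passthrough added to `ConfiguredBaseModel` in this file (and repeated in the modules below) makes any model indexable through its `value` or `data` field. A minimal, self-contained sketch of that behavior, with hypothetical stand-in classes in place of the generated ones:

```python
# Hypothetical stand-ins mirroring the __getitem__ added to ConfiguredBaseModel:
# indexing tries the "value" field first, then falls back to "data".
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class ToyConfiguredBase(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

class ToyImage(ToyConfiguredBase):
    value: Optional[List[float]] = None  # renamed from "array" in the diff

class ToySeries(ToyConfiguredBase):
    data: Optional[List[float]] = None

print(ToyImage(value=[0.0, 1.0, 2.0])[1])  # -> 1.0, served from "value"
print(ToySeries(data=[3.0, 4.0])[0:2])     # -> [3.0, 4.0], served from "data"
```
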
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
index 959d2df..4a2cdd6 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,21 +312,35 @@ class DecompositionSeries(TimeSeries):
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -330,11 +369,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -361,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -370,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -384,24 +424,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -428,7 +466,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -437,61 +480,80 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -517,14 +579,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
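
The `Named[...]` annotations that gain `source_type` metadata above rely on the `_get_name` validator whose assert message this diff improves. A sketch of how that wrapper stamps the parent slot's name onto the child model, using stand-in types in place of the generated ones:

```python
# Stand-in sketch of the Named/_get_name pattern (ToyVectorIndex and ToyUnits
# are hypothetical); mirrors the validator shown in the diff above.
from typing import Annotated, Optional, Type, TypeVar, Union
from pydantic import BaseModel, BeforeValidator, ValidationInfo

ModelType = TypeVar("ModelType", bound=Type[BaseModel])

def _get_name(item: Union[BaseModel, dict], info: ValidationInfo) -> Union[BaseModel, dict]:
    """Copy the name of the slot that refers to this object onto the object."""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    name = info.field_name
    if isinstance(item, BaseModel):
        item.name = name
    else:
        item["name"] = name
    return item

Named = Annotated[ModelType, BeforeValidator(_get_name)]

class ToyVectorIndex(BaseModel):
    name: Optional[str] = None

class ToyUnits(BaseModel):
    spike_times_index: Named[Optional[ToyVectorIndex]] = None

row = ToyUnits(spike_times_index=ToyVectorIndex())
print(row.spike_times_index.name)  # -> "spike_times_index"
```
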
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py
index 5b95cba..ead9c1a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_2_5.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_2_5.core_nwb_device import Device
metamodel_version = "None"
version = "2.2.5"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
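
The required `site` and `device` slots added above follow the link pattern used throughout these models: a `Union[TargetType, str]` that accepts either the target object inline or a string reference to be resolved later. A stand-in sketch (the Toy* names are hypothetical, not nwb_linkml API):

```python
# Hypothetical stand-ins for the Union[Model, str] link slots added above.
from typing import Union
from pydantic import BaseModel

class ToyDevice(BaseModel):
    name: str

class ToyStimulusSite(BaseModel):
    name: str
    device: Union[ToyDevice, str]  # inline object, or a string reference

inline = ToyStimulusSite(name="site0", device=ToyDevice(name="laser0"))
by_ref = ToyStimulusSite(name="site1", device="/general/devices/laser0")

for site in (inline, by_ref):
    # a reader would resolve string references against the file; here we branch
    if isinstance(site.device, str):
        print(f"{site.name}: unresolved link -> {site.device}")
    else:
        print(f"{site.name}: inline device -> {site.device.name}")
```
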
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
index 4a695c9..4e95539 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_1_3.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_2_5.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_2_5.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_2_5.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_2_5.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_2_5.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.2.5"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,24 +124,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -144,21 +162,26 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,31 +210,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,7 +270,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -253,7 +285,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -268,7 +300,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -292,7 +324,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -303,7 +340,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -315,14 +357,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -348,10 +397,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -374,13 +421,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -403,14 +460,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -429,10 +500,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
class OpticalChannel(NWBContainer):
@@ -446,9 +630,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -460,7 +642,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -483,6 +665,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -497,6 +688,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
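
The three new `ImagingPlane*` helper classes pin `name` with a `Literal` default and keep `unit` defaulting to `meters`. A minimal usage sketch, assuming the module resolves under the import path implied by the file layout above:

```python
import numpy as np

from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_ophys import (
    ImagingPlaneGridSpacing,
)

# (x, y, z) voxel pitch in meters; validates against the 3-element shape
spacing = ImagingPlaneGridSpacing(value=np.array([0.0002, 0.0002, 0.0005]))
assert spacing.name == "grid_spacing"  # fixed by the Literal default
assert spacing.unit == "meters"        # ifabsent default
```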
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
index 4c3f758..916660c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py
index fae01ff..ce33adb 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py
@@ -56,6 +56,9 @@ from ...core.v2_2_5.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -134,10 +137,11 @@ from ...core.v2_2_5.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
)
@@ -161,6 +165,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
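
A sketch of the renamed re-exports (import path assumed from the file layout above):

```python
from nwb_linkml.models.pydantic.core.v2_2_5.namespace import (
    ExtracellularEphysElectrodes,  # was NWBFileGeneralExtracellularEphysElectrodes
    GeneralSourceScript,           # was NWBFileGeneralSourceScript
    NWBFileIntervals,              # newly exported
)
```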
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py
index d6fc9fa..4fd6a4a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py
@@ -29,6 +29,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -88,15 +97,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -135,10 +144,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -148,12 +162,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,23 +196,25 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -221,11 +237,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -250,7 +270,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -269,9 +289,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
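
`TimeSeriesData.unit` is now required, while `conversion` and `resolution` gain their schema defaults. A sketch, assuming the import path implied by the file layout:

```python
import numpy as np
from pydantic import ValidationError

from nwb_linkml.models.pydantic.core.v2_3_0.core_nwb_base import TimeSeriesData

data = TimeSeriesData(unit="volts", value=np.zeros(3))
assert data.conversion == 1.0 and data.resolution == -1.0  # ifabsent defaults

try:
    TimeSeriesData()  # unit is now required
except ValidationError:
    pass
```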
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py
index b764c15..898519c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
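
The grouping interfaces now hold their contained series under `value` rather than `children`, and `TimeSeries.description` picks up the `no description` default. A sketch under the same path assumptions:

```python
import numpy as np

from nwb_linkml.models.pydantic.core.v2_3_0.core_nwb_base import (
    TimeSeries,
    TimeSeriesData,
)
from nwb_linkml.models.pydantic.core.v2_3_0.core_nwb_behavior import (
    BehavioralEvents,
)

events = BehavioralEvents(
    name="behavioral_events",
    value=[  # was `children`
        TimeSeries(
            name="lever_presses",
            data=TimeSeriesData(unit="n/a", value=np.array([0.0, 1.0, 1.0])),
        )
    ],
)
assert events.value[0].description == "no description"  # new ifabsent default
```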
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py
index 0640dac..ec6a770 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
index 9cbedd2..72ca241 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_3_0.core_nwb_device import Device
from ...core.v2_3_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -112,37 +122,47 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -171,10 +191,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,24 +207,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -240,7 +270,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -255,7 +285,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -264,7 +294,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -285,16 +320,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -306,7 +350,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -321,7 +365,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -336,7 +380,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -352,14 +396,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -375,9 +428,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -396,7 +461,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -405,7 +470,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -414,6 +479,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -432,17 +506,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
index 6457839..fd6ff0a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -170,21 +187,23 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py
index e4d19ca..4fbd77b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_3_0.core_nwb_epoch import TimeIntervals
from ...core.v2_3_0.core_nwb_misc import Units
from ...core.v2_3_0.core_nwb_device import Device
from ...core.v2_3_0.core_nwb_ogen import OptogeneticStimulusSite
@@ -16,6 +15,7 @@ from ...core.v2_3_0.core_nwb_ecephys import ElectrodeGroup
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData
from ...core.v2_3_0.core_nwb_icephys import IntracellularElectrode, SweepTable
+from ...core.v2_3_0.core_nwb_epoch import TimeIntervals
from ...core.v2_3_0.core_nwb_base import (
NWBData,
NWBContainer,
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -96,9 +105,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -114,11 +121,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.3.0"] = Field(
+ "2.3.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.3.0", "ifabsent": "string(2.3.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -132,11 +140,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -174,19 +182,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -272,7 +270,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -300,10 +298,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -318,7 +316,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -331,11 +329,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -354,12 +352,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -372,7 +370,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -381,7 +379,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -390,7 +388,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -399,7 +397,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@@ -408,7 +406,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -417,7 +415,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, np.float32] = Field(
+ filtering: VectorData[NDArray[Any, float]] = Field(
...,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@@ -429,7 +427,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -438,7 +436,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -447,7 +445,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -456,7 +454,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -465,7 +463,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -474,14 +472,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -491,7 +487,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -519,6 +515,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -544,7 +569,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -573,9 +598,10 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
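
The net effect of the `core_nwb_file.py` changes above: `NWBFile.intervals` is promoted from a flat `List[TimeIntervals]` (with a duplicated `any_of` annotation) to a dedicated `NWBFileIntervals` container with named slots. A minimal sketch of the new shape, using simplified stand-in classes rather than the generated models (which also carry `linkml_meta` and the other HDMF fields):

```python
from typing import List, Literal, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel):
    """Stand-in for the generated core.nwb.epoch TimeIntervals model."""
    name: str

class NWBFileIntervals(BaseModel):
    # name is fixed by the schema ("equals_string": "intervals")
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # any additional, freely named interval tables
    time_intervals: Optional[List[TimeIntervals]] = None

iv = NWBFileIntervals(trials=TimeIntervals(name="trials"))
assert iv.name == "intervals" and iv.epochs is None
```
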
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
index 48af82c..95dc2af 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
@@ -11,6 +11,7 @@ from ...core.v2_3_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_3_0.core_nwb_device import Device
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
from pydantic import (
BaseModel,
@@ -42,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -67,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -106,32 +116,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -160,11 +184,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -180,36 +204,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,9 +276,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -255,40 +294,53 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None,
+ stimulus_description: Literal["N/A"] = Field(
+ "N/A",
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -317,31 +369,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -370,9 +436,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -409,31 +478,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -462,9 +545,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -485,11 +571,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -508,11 +597,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -531,11 +623,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -554,11 +647,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -577,11 +673,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -600,11 +699,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -623,11 +725,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -641,31 +744,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -694,9 +811,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -727,6 +845,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -739,7 +866,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -755,17 +882,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py
index d54abe3..81ce140 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py
@@ -7,6 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from ...core.v2_3_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
from ...core.v2_3_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -28,6 +29,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +81,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +104,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +127,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -142,11 +152,11 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -159,21 +169,35 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,11 +228,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -223,13 +247,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -242,21 +275,35 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,24 +331,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -314,21 +360,35 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -356,26 +416,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
index 45a4d8b..f0e9795 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,28 +312,47 @@ class DecompositionSeries(TimeSeries):
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
bands: DecompositionSeriesBands = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -337,11 +381,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -368,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -377,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -391,24 +436,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -432,7 +475,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -441,84 +489,115 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
+ waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
+ Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
+ }
+ },
+ )
)
waveforms_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
waveforms_index_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -541,14 +620,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py
index 71d202b..e77547e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_3_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_3_0.core_nwb_device import Device
metamodel_version = "None"
version = "2.3.0"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
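+ # Link fields such as ``device`` above are generated as ``Union[<Target>, str]``:
+ # validation accepts either the target model instance or a string reference
+ # to it. Illustrative sketch only:
+ #
+ #     site = OptogeneticStimulusSite(
+ #         name="site0", description="fiber over V1",
+ #         excitation_lambda=600.0, location="VISp", device="device0",
+ #     )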
# Model rebuild
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
index a08cfb2..ee291fb 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_3_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_3_0.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_3_0.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.3.0"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,24 +124,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Optional[
Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, * z"], float],
]
] = Field(None, description="""Binary data representing images across frames.""")
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -144,21 +162,35 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,31 +219,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -238,7 +279,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
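+ # With ``children`` renamed to ``value``, the shared ``__getitem__`` makes
+ # container groups directly indexable (illustrative, assuming ``value`` is
+ # populated):
+ #
+ #     df_over_f[0]  # first RoiResponseSeries, read from ``value``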
@@ -253,7 +294,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -268,7 +309,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -292,7 +333,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -303,7 +349,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -315,14 +366,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -345,10 +403,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -371,13 +427,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -400,14 +466,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -426,10 +506,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
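+
+# LinkML ``ifabsent`` defaults are materialized as plain pydantic defaults, so
+# these subobjects can be instantiated without arguments (illustrative):
+#
+#     ImagingPlaneGridSpacing().unit     # -> "meters"
+#     ImagingPlaneManifold().conversion  # -> 1.0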
class OpticalChannel(NWBContainer):
@@ -443,9 +636,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -457,7 +648,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -480,6 +671,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -494,6 +694,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
index 6af3e96..85857c3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py
index 807ed1c..6bb4f8d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py
@@ -59,6 +59,9 @@ from ...core.v2_3_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -137,10 +140,11 @@ from ...core.v2_3_0.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
)
@@ -164,6 +168,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
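+ # A rough usage sketch of the delegation above (illustrative names only,
+ # not part of the generated API):
+ #
+ # >>> img = Image(name="img", value=np.zeros((2, 2)))
+ # >>> img[0] # delegates to img.value[0]
+ # array([0., 0.])
+ # >>> LabMetaData(name="meta")[0] # no value/data field -> KeyError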
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
index fe433cd..9ec8413 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
@@ -4,9 +4,22 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
@@ -29,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -48,6 +70,150 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
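+ # Minimal sketch of the mixin in use (illustrative, assumes the generated
+ # VectorData subclass defined elsewhere in this package):
+ #
+ # >>> col = VectorData(name="col", description="demo", value=np.arange(4))
+ # >>> col[1:3] # no _index set, so this slices col.value
+ # array([1, 2])
+ # >>> len(col)
+ # 4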
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+ """
+ Mixin class for TimeSeriesReferenceVectorData -
+ very simple, just indexing the given timeseries object.
+
+ These shouldn't have additional fields in them, just the three columns:
+ idx_start, count, and timeseries
+ """
+
+ idx_start: NDArray[Shape["*"], int]
+ count: NDArray[Shape["*"], int]
+ timeseries: NDArray
+
+ @model_validator(mode="after")
+ def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+ """
+ Each of the three indexing columns must be the same length to work!
+ """
+ assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+ f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+ f" timeseries: {len(self.timeseries)}"
+ )
+ return self
+
+ def __len__(self) -> int:
+ """Since we have ensured equal length, just return idx_start"""
+ return len(self.idx_start)
+
+ @overload
+ def _slice_helper(self, item: int) -> slice: ...
+
+ @overload
+ def _slice_helper(self, item: slice) -> List[slice]: ...
+
+ def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+ if isinstance(item, (int, np.integer)):
+ return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+ else:
+ starts = self.idx_start[item]
+ ends = starts + self.count[item]
+ return [slice(start, end) for start, end in zip(starts, ends)]
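+
+ # Illustrative: with idx_start=[0, 5] and count=[3, 2],
+ # _slice_helper(1) -> slice(5, 7)
+ # _slice_helper(slice(0, 2)) -> [slice(0, 3), slice(5, 7)]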
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+
+ if isinstance(item, (int, np.integer)):
+ return self.timeseries[item][self._slice_helper(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.idx_start)))
+ return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item]
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, must be an int, slice, or iterable"
+ )
+
+ def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+ if isinstance(key, (int, np.integer)):
+ self.timeseries[key][self._slice_helper(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.idx_start)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " target Timeseries object if you need more control"
+ )
+ for subitem, subvalue in zip(key, value):
+ self.timeseries[subitem][self._slice_helper(subitem)] = subvalue
+ else:
+ for subitem in key:
+ self.timeseries[subitem][self._slice_helper(subitem)] = value
+ else:
+ raise ValueError(
+ f"Dont know how to index with {key}, must be an int, slice, or iterable"
+ )
+
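+ # End-to-end sketch of the selection semantics (ts_a and ts_b are
+ # hypothetical TimeSeries objects, not defined in this module):
+ #
+ # >>> ref = TimeSeriesReferenceVectorData(
+ # ...     name="timeseries", description="demo",
+ # ...     idx_start=np.array([0, 10]), count=np.array([5, 5]),
+ # ...     timeseries=np.array([ts_a, ts_b], dtype=object))
+ # >>> ref[0] # ts_a[0:5], via _slice_helper
+ # >>> ref[0:2] # [ts_a[0:5], ts_b[10:15]]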
+
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -78,7 +244,7 @@ class NWBData(Data):
name: str = Field(...)
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
@@ -90,19 +256,23 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
- idx_start: np.int32 = Field(
+ idx_start: NDArray[Shape["*"], int] = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: np.int32 = Field(
+ count: NDArray[Shape["*"], int] = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+ ...,
+ description="""The TimeSeries that this index applies to""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -122,15 +292,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -169,10 +339,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -182,12 +357,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -216,23 +391,25 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -255,11 +432,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -284,7 +465,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -303,9 +484,7 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py
index 8e859b8..98282c5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,13 +142,14 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -148,7 +163,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -163,7 +178,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -178,7 +193,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -193,7 +208,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -208,7 +223,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -223,7 +238,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -238,7 +253,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py
index f59deb8..f54c25e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
index 1a25b07..de74f33 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_4_0.core_nwb_device import Device
from ...core.v2_4_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -112,37 +122,47 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -171,10 +191,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,24 +207,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -240,7 +270,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -255,7 +285,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -264,7 +294,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -285,16 +320,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -306,7 +350,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -321,7 +365,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -336,7 +380,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -352,14 +396,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -375,9 +428,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -396,7 +461,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -405,7 +470,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -414,6 +479,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -432,17 +506,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
index 99ba895..31033ca 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,7 +136,12 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Optional[TimeIntervalsTimeseries] = Field(
@@ -137,17 +151,20 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -170,21 +187,23 @@ class TimeIntervalsTimeseries(VectorData):
"linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
},
)
- idx_start: Optional[np.int32] = Field(
+ idx_start: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: Optional[np.int32] = Field(
+ count: Optional[NDArray[Shape["*"], int]] = Field(
None,
description="""Number of data samples available in this time series, during this epoch.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: Optional[TimeSeries] = Field(
- None, description="""the TimeSeries that this index applies to."""
+ timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+ None,
+ description="""the TimeSeries that this index applies to.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py
index c2c5d26..bc2132e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_4_0.core_nwb_epoch import TimeIntervals
from ...core.v2_4_0.core_nwb_misc import Units
from ...core.v2_4_0.core_nwb_device import Device
from ...core.v2_4_0.core_nwb_ogen import OptogeneticStimulusSite
@@ -24,6 +23,7 @@ from ...core.v2_4_0.core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
)
+from ...core.v2_4_0.core_nwb_epoch import TimeIntervals
from ...core.v2_4_0.core_nwb_base import (
NWBData,
NWBContainer,
@@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -104,9 +113,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -122,11 +129,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.4.0"] = Field(
+ "2.4.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.4.0", "ifabsent": "string(2.4.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -140,11 +148,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -182,19 +190,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -280,7 +278,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -308,10 +306,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -326,7 +324,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -339,11 +337,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -362,12 +360,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -380,7 +378,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: NDArray[Any, np.float32] = Field(
+ x: VectorData[NDArray[Any, float]] = Field(
...,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -389,7 +387,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: NDArray[Any, np.float32] = Field(
+ y: VectorData[NDArray[Any, float]] = Field(
...,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -398,7 +396,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: NDArray[Any, np.float32] = Field(
+ z: VectorData[NDArray[Any, float]] = Field(
...,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -407,7 +405,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: NDArray[Any, np.float32] = Field(
+ imp: VectorData[NDArray[Any, float]] = Field(
...,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@@ -416,7 +414,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -425,7 +423,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: NDArray[Any, str] = Field(
+ filtering: VectorData[NDArray[Any, str]] = Field(
...,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@@ -437,7 +435,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -446,7 +444,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -455,7 +453,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -464,7 +462,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -473,7 +471,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference used for this electrode.""",
json_schema_extra={
@@ -482,14 +480,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -499,7 +495,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -548,6 +544,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
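Every sub-table on the new `NWBFileIntervals` container is optional and `name` carries a Literal default, so a bare instance validates; a quick sketch, assuming the generated module imports cleanly:

```python
intervals = NWBFileIntervals()
assert intervals.name == "intervals"  # pinned by the Literal default
assert intervals.trials is None       # all sub-tables default to None
```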
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -573,7 +598,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -602,9 +627,10 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
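The rebuild calls above resolve pydantic v2 forward references once the module's full class graph exists; the same pattern in miniature (toy classes, not from the source):

```python
from typing import Optional
from pydantic import BaseModel

class A(BaseModel):
    b: Optional["B"] = None  # "B" does not exist yet

class B(BaseModel):
    x: int = 0

A.model_rebuild()  # resolves the forward reference to B
A(b=B())           # now validates
```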
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
index b84fea1..10cce5d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
@@ -5,6 +5,7 @@ from enum import Enum
import re
import sys
import numpy as np
+from ...core.v2_4_0.core_nwb_device import Device
from ...core.v2_4_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
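A self-contained illustration of the new pass-through indexing, with toy fields on the same base class; `value` and `data` are exactly the attributes the method probes:

```python
from typing import Optional

class Wrapper(ConfiguredBaseModel):
    value: Optional[list] = None
    data: Optional[list] = None

assert Wrapper(value=[1, 2, 3])[0] == 1       # served from `value`
assert Wrapper(data=[4, 5, 6])[1:] == [5, 6]  # falls back to `data`
# Wrapper()[0] raises KeyError: no value or data field to index from
```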
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -113,32 +123,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,11 +191,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
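The dtype swaps throughout these hunks replace exact-width pins (`np.float32`, `np.uint8`) with builtin `float`/`int`. A sketch of the practical effect, assuming numpydantic's generic dtypes admit any width of the builtin kind:

```python
import numpy as np
from pydantic import BaseModel
from numpydantic import NDArray, Shape

class Timestamps(BaseModel):
    value: NDArray[Shape["* num_times"], float]

Timestamps(value=np.arange(3, dtype=np.float32))  # accepted
Timestamps(value=np.arange(3, dtype=np.float64))  # also accepted
```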
@@ -187,36 +211,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -245,9 +283,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
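Fixed-value attributes like `unit` become Literal fields with matching defaults, so the pinned value is both auto-filled and enforced. A sketch, assuming the generated class imports:

```python
from pydantic import ValidationError

data = CurrentClampSeriesData(value=0.0)  # unit defaults to "volts"
assert data.unit == "volts"
try:
    CurrentClampSeriesData(value=0.0, unit="amperes")
except ValidationError:
    pass  # rejected: unit must equal "volts"
```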
@@ -262,40 +301,53 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None,
+ stimulus_description: Literal["N/A"] = Field(
+ "N/A",
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -324,31 +376,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -377,9 +443,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -416,31 +485,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -469,9 +552,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -492,11 +578,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -515,11 +604,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -538,11 +630,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -561,11 +654,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -584,11 +680,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -607,11 +706,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -630,11 +732,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -648,31 +751,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -701,9 +818,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -734,6 +852,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
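Link slots such as the new `device` (and the `electrode` fields above) are generated as a Union of the target type and `str`, so a reference can travel either as the resolved object or as a path string. A toy sketch reusing the imported `Device` range; the path is illustrative:

```python
from typing import Union
from pydantic import BaseModel

class ToyContainer(BaseModel):
    device: Union[Device, str]  # mirrors the generated link slot

ToyContainer(device="/general/devices/amplifier0")  # path form validates too
```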
class SweepTable(DynamicTable):
@@ -746,7 +873,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -762,17 +889,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
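A self-contained sketch of what the `named` annotation presumably wires up via the module's `_get_name` validator: stamp the slot name onto the incoming model before field validation (toy classes, and assuming `_get_name` returns the item):

```python
from typing import Annotated, Optional
from pydantic import BaseModel, BeforeValidator

class Thing(BaseModel):
    name: Optional[str] = None

class Holder(BaseModel):
    series_index: Annotated[Thing, BeforeValidator(_get_name)]

assert Holder(series_index=Thing()).series_index.name == "series_index"
```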
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -792,17 +922,24 @@ class IntracellularElectrodesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular electrode related metadata."] = Field(
+ "Table for storing intracellular electrode related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular electrode related metadata.",
+ "ifabsent": "string(Table for storing intracellular electrode related metadata.)",
+ }
+ },
)
electrode: List[IntracellularElectrode] = Field(
..., description="""Column for storing the reference to the intracellular electrode."""
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -822,21 +959,33 @@ class IntracellularStimuliTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular stimulus related metadata."] = Field(
+ "Table for storing intracellular stimulus related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular stimulus related metadata.",
+ "ifabsent": "string(Table for storing intracellular stimulus related metadata.)",
+ }
+ },
)
stimulus: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded stimulus for the recording (rows).""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -856,21 +1005,33 @@ class IntracellularResponsesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular response related metadata."] = Field(
+ "Table for storing intracellular response related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular response related metadata.",
+ "ifabsent": "string(Table for storing intracellular response related metadata.)",
+ }
+ },
)
response: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded response for the recording (rows)""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -898,9 +1059,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
}
},
)
- description: Optional[str] = Field(
- None,
+ description: Literal[
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording."
+ ] = Field(
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording.",
description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": (
+ "A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and "
+ "for storing metadata about the intracellular recording."
+ ),
+ "ifabsent": (
+ "string(A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and for "
+ "storing metadata about the intracellular recording.)"
+ ),
+ }
+ },
)
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
@@ -911,14 +1090,14 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
responses: IntracellularResponsesTable = Field(
..., description="""Table for storing intracellular response related metadata."""
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -954,17 +1133,20 @@ class SimultaneousRecordingsTable(DynamicTable):
...,
description="""Index dataset for the recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -987,14 +1169,14 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
"linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"}
},
)
- table: Optional[IntracellularRecordingsTable] = Field(
- None,
+ table: IntracellularRecordingsTable = Field(
+ ...,
description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1030,10 +1212,15 @@ class SequentialRecordingsTable(DynamicTable):
...,
description="""Index dataset for the simultaneous_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- stimulus_type: NDArray[Any, str] = Field(
+ stimulus_type: VectorData[NDArray[Any, str]] = Field(
...,
description="""The type of stimulus used for the sequential recording.""",
json_schema_extra={
@@ -1042,14 +1229,12 @@ class SequentialRecordingsTable(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1075,14 +1260,14 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SimultaneousRecordingsTable] = Field(
- None,
+ table: SimultaneousRecordingsTable = Field(
+ ...,
description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1115,17 +1300,20 @@ class RepetitionsTable(DynamicTable):
...,
description="""Index dataset for the sequential_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1151,14 +1339,14 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SequentialRecordingsTable] = Field(
- None,
+ table: SequentialRecordingsTable = Field(
+ ...,
description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1193,17 +1381,20 @@ class ExperimentalConditionsTable(DynamicTable):
...,
description="""Index dataset for the repetitions column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1226,14 +1417,14 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
"linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"}
},
)
- table: Optional[RepetitionsTable] = Field(
- None,
+ table: RepetitionsTable = Field(
+ ...,
description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
index 1209210..c792b06 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
@@ -7,6 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from ...core.v2_4_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
from ...core.v2_4_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
@@ -28,6 +29,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +81,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
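With the `array` slot renamed to `value`, an image model wraps its pixel data directly, and the base-class `__getitem__` above indexes through it. A sketch, assuming the generated class imports:

```python
import numpy as np

img = GrayscaleImage(name="frame0", value=np.zeros((16, 16), dtype=np.float32))
rows = img[0:4]  # pass-through indexing into `value`
```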
@@ -94,15 +104,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +127,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -141,13 +151,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -160,21 +169,35 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -205,11 +228,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
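The docstring's worked example for the now-required `starting_frame` (files of 5, 10, and 20 frames) reduces to a cumulative offset:

```python
frames_per_file = [5, 10, 20]
starting_frame = [0]
for n in frames_per_file[:-1]:
    starting_frame.append(starting_frame[-1] + n)
assert starting_frame == [0, 5, 15]  # matches the docstring example
```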
@@ -224,14 +247,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -244,21 +275,35 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -286,24 +331,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -316,21 +360,35 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,26 +416,40 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the frame in the referenced ImageSeries.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ indexed_timeseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
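
The `device` and `indexed_timeseries` slots added above follow the pattern used for every NWB link in these models: the field accepts either the resolved target object or a string path to it, and the `linkml_meta` annotation records that the slot originated as a `link` in the source schema. A minimal sketch of the shape of such a slot, with a simplified stand-in `Device`:

```python
from typing import Optional, Union
from pydantic import BaseModel, Field

class Device(BaseModel):
    name: str

class SeriesWithLink(BaseModel):
    # a link slot holds the target object or a string reference to it
    device: Optional[Union[Device, str]] = Field(None)

SeriesWithLink(device=Device(name="2p-scope"))      # resolved object
SeriesWithLink(device="/general/devices/2p-scope")  # unresolved path
```
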
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
index 941eed4..5a64e12 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
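
The `__getitem__` added to every `ConfiguredBaseModel` lets the wrapper classes be indexed directly, forwarding to whichever of `value` or `data` is populated (see the `array` to `value` renames below). A self-contained sketch of the same behavior:

```python
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class IndexableModel(BaseModel):
    value: Optional[List[float]] = None
    data: Optional[List[float]] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # forward indexing to `value`, then `data`, as in the diff above
        if self.value is not None:
            return self.value[val]
        elif self.data is not None:
            return self.data[val]
        raise KeyError("No value or data field to index from")

m = IndexableModel(data=[0.0, 0.5, 1.0])
assert m[1] == 0.5
```
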
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
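
Renaming the untyped `array` slot to `value` is what makes the new `__getitem__` useful on these `*Data` wrappers: indexing checks `value` first. Hypothetical usage of the class above, assuming only the fields shown in this hunk are required:

```python
import numpy as np
# hypothetical import path, mirroring this file's location in the package
from nwb_linkml.models.pydantic.core.v2_4_0.core_nwb_misc import (
    AbstractFeatureSeriesData,
)

d = AbstractFeatureSeriesData(value=np.array([[0.1, 0.2], [0.3, 0.4]]))
d[0]  # routed to `value` by ConfiguredBaseModel.__getitem__
```
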
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
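
`description` and `comments` previously defaulted to `None`; the schema's `ifabsent` values are now materialized as pydantic defaults and mirrored in `linkml_meta` so a round-trip back to LinkML preserves them. Sketch of the effect:

```python
from typing import Optional
from pydantic import BaseModel, Field

class SeriesStub(BaseModel):
    description: Optional[str] = Field(
        "no description",
        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
    )

assert SeriesStub().description == "no description"
```
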
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,28 +312,47 @@ class DecompositionSeries(TimeSeries):
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
bands: DecompositionSeriesBands = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -337,11 +381,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -368,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -377,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -391,24 +436,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
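
Table columns change from bare `NDArray` annotations to `VectorData[NDArray[...]]`: `VectorData` is treated as a generic model parameterized by its array type, so column metadata survives while dtype and shape are still validated. A simplified sketch of the pattern (this `VectorData` is a stand-in, not the actual hdmf_common class):

```python
from typing import Generic, Optional, TypeVar
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

T = TypeVar("T")

class VectorData(BaseModel, Generic[T]):
    # hypothetical, simplified stand-in for hdmf_common.VectorData
    name: str = "column"
    description: str = ""
    value: Optional[T] = None

BandMeanCol = VectorData[NDArray[Shape["* num_bands"], float]]
col = BandMeanCol(
    name="band_mean",
    description="mean of the Gaussian filters, in Hz",
    value=np.array([4.0, 8.0, 16.0]),
)
```
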
@@ -432,7 +475,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -441,84 +489,115 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
+ waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
+ Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
+ }
+ },
+ )
)
waveforms_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
waveforms_index_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
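
As in `DecompositionSeriesBands` above, `colnames` becomes a required `List[str]` and `description` a required `str`, so omitting either now fails fast at construction instead of silently yielding `None`. Sketch, with a hypothetical `TableStub`:

```python
from typing import List
from pydantic import BaseModel, Field, ValidationError

class TableStub(BaseModel):
    colnames: List[str] = Field(...)
    description: str = Field(...)

try:
    TableStub(colnames=["band_name"])  # description omitted
except ValidationError as e:
    print(e.error_count(), "validation error(s)")
```
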
@@ -541,14 +620,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py
index 2b145ce..d9184d4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_4_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_4_0.core_nwb_device import Device
metamodel_version = "None"
version = "2.4.0"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
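
Unlike the optional `device` link on the image-series classes, `OptogeneticSeries.site` and `OptogeneticStimulusSite.device` are required (`Field(...)`), matching the NWB schema where those links are mandatory. Hypothetical usage of the generated class, assuming the import path mirrors this file's location:

```python
from pydantic import ValidationError
from nwb_linkml.models.pydantic.core.v2_4_0.core_nwb_ogen import (
    OptogeneticStimulusSite,
)

try:
    OptogeneticStimulusSite(
        name="site0",
        description="fiber over M1",
        excitation_lambda=473.0,
        location="M1",
        # device omitted -> required link is missing
    )
except ValidationError as err:
    print("missing required link:", err.error_count(), "error(s)")
```
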
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
index 62d96b8..da2c58f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_4_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_4_0.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_4_0.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.4.0"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,25 +124,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -145,21 +162,35 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -188,31 +219,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -239,7 +279,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -254,7 +294,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -269,7 +309,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -293,7 +333,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -304,7 +349,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -316,14 +366,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -346,10 +403,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -372,13 +427,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
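
The scalar `x`/`y`/`weight` typing on the pixel-mask column could only describe a single pixel; each is now a 1-D array (`exact_number_dimensions: 1`) holding one entry per pixel in the mask. Sketch of the corrected shape, with a simplified stand-in model:

```python
from typing import Optional
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field

class PixelMaskStub(BaseModel):
    # one coordinate/weight per pixel, not a single scalar
    x: Optional[NDArray[Shape["*"], int]] = Field(None)
    y: Optional[NDArray[Shape["*"], int]] = Field(None)
    weight: Optional[NDArray[Shape["*"], float]] = Field(None)

PixelMaskStub(
    x=np.array([10, 11, 12]),
    y=np.array([5, 5, 6]),
    weight=np.array([0.5, 1.0, 0.75]),
)
```
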
@@ -401,14 +466,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -427,10 +506,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
class OpticalChannel(NWBContainer):
@@ -444,9 +636,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -458,7 +648,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -481,6 +671,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -495,6 +694,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
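An aside on the `ifabsent` metadata carried through the new subgroup classes above: in the generated pydantic models it is nothing more than a plain field default mirrored into `json_schema_extra`. A minimal sketch, assuming only pydantic itself (the class name is an illustrative stand-in, not a generated class):

```python
from pydantic import BaseModel, Field

class GridSpacingSketch(BaseModel):
    """Toy mirror of ImagingPlaneGridSpacing.unit: LinkML's
    `ifabsent: string(meters)` becomes a plain pydantic default."""

    unit: str = Field(
        "meters",
        json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
    )

assert GridSpacingSketch().unit == "meters"  # default applies when absent
assert GridSpacingSketch(unit="microns").unit == "microns"
```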
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
index aaad019..1b06207 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py
index f4b518c..ecb9186 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py
@@ -60,6 +60,9 @@ from ...core.v2_4_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -150,10 +153,11 @@ from ...core.v2_4_0.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
)
@@ -177,6 +181,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
index a686488..aefea7a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
@@ -8,13 +8,29 @@ import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+ Annotated,
+ Type,
+)
from pydantic import (
BaseModel,
ConfigDict,
Field,
RootModel,
field_validator,
+ model_validator,
ValidationInfo,
BeforeValidator,
)
@@ -37,6 +53,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -57,12 +82,155 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+ """
+ Mixin class for TimeSeriesReferenceVectorData -
+ very simple, just indexing the given timeseries object.
+
+ These shouldn't have additional fields in them, just the three columns
+ for idx_start, count, and timeseries.
+ """
+
+ idx_start: NDArray[Shape["*"], int]
+ count: NDArray[Shape["*"], int]
+ timeseries: NDArray
+
+ @model_validator(mode="after")
+ def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+ """
+ Each of the three indexing columns must be the same length to work!
+ """
+ assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+ f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+ f" timeseries: {len(self.timeseries)}"
+ )
+ return self
+
+ def __len__(self) -> int:
+ """Since we have ensured equal length, just return idx_start"""
+ return len(self.idx_start)
+
+ @overload
+ def _slice_helper(self, item: int) -> slice: ...
+
+ @overload
+ def _slice_helper(self, item: slice) -> List[slice]: ...
+
+ def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+ if isinstance(item, (int, np.integer)):
+ return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+ else:
+ starts = self.idx_start[item]
+ ends = starts + self.count[item]
+ return [slice(start, end) for start, end in zip(starts, ends)]
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+
+ if isinstance(item, (int, np.integer)):
+ return self.timeseries[item][self._slice_helper(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.idx_start)))
+ return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item]
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, must be an int, slice, or iterable"
+ )
+
+ def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+ if isinstance(key, (int, np.integer)):
+ self.timeseries[key][self._slice_helper(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.idx_start)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " target Timeseries object if you need more control"
+ )
+ for subitem, subvalue in zip(key, value):
+ self.timeseries[subitem][self._slice_helper(subitem)] = subvalue
+ else:
+ for subitem in key:
+ self.timeseries[subitem][self._slice_helper(subitem)] = value
+ else:
+ raise ValueError(
+ f"Dont know how to index with {key}, must be an int, slice, or iterable"
+ )
+
+
ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -102,7 +270,7 @@ class NWBData(Data):
name: str = Field(...)
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
@@ -114,19 +282,23 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
- idx_start: np.int32 = Field(
+ idx_start: NDArray[Shape["*"], int] = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: np.int32 = Field(
+ count: NDArray[Shape["*"], int] = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+ ...,
+ description="""The TimeSeries that this index applies to""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -146,15 +318,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -169,8 +341,14 @@ class ImageReferences(NWBData):
)
name: str = Field(...)
- image: List[Image] = Field(
- ..., description="""Ordered dataset of references to Image objects."""
+ value: List[Image] = Field(
+ ...,
+ description="""Ordered dataset of references to Image objects.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "reference"}}
+ }
+ },
)
@@ -208,10 +386,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -221,12 +404,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -255,27 +438,29 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- offset: Optional[np.float32] = Field(
+ offset: Optional[float] = Field(
None,
description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -298,11 +483,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -327,7 +516,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -346,15 +535,18 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
order_of_images: Named[Optional[ImageReferences]] = Field(
None,
description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
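The start/count selection in `TimeSeriesReferenceVectorDataMixin._slice_helper` above is easy to state standalone: row `i` resolves to `timeseries[i][idx_start[i] : idx_start[i] + count[i]]`. A minimal sketch of that arithmetic with plain numpy (variable names are illustrative):

```python
import numpy as np

idx_start = np.array([0, 5, 2])
count = np.array([3, 2, 4])
timeseries = [np.arange(10), np.arange(10) * 2, np.arange(10) * 3]

def select(i: int) -> np.ndarray:
    """Mirror of the int branch of _slice_helper + __getitem__."""
    return timeseries[i][idx_start[i] : idx_start[i] + count[i]]

assert select(0).tolist() == [0, 1, 2]        # rows 0..2 of series 0
assert select(1).tolist() == [10, 12]         # rows 5..6 of series 1
assert select(2).tolist() == [6, 9, 12, 15]   # rows 2..5 of series 2
```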
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py
index 4c8757c..86b4bc0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,15 +142,16 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, 1 x"], np.number],
- NDArray[Shape["* num_times, 2 x_y"], np.number],
- NDArray[Shape["* num_times, 3 x_y_z"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, 1 x"], float],
+ NDArray[Shape["* num_times, 2 x_y"], float],
+ NDArray[Shape["* num_times, 3 x_y_z"], float],
]
] = Field(None)
@@ -150,7 +165,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -165,7 +180,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -180,7 +195,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -195,7 +210,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -210,7 +225,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -225,7 +240,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -240,7 +255,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
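The `children` -> `value` rename across these behavior interfaces is what lets the base-model `__getitem__` passthrough work uniformly for group-like containers as well as datasets. A toy sketch (class names are illustrative stand-ins for the generated models):

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel

class SeriesSketch(BaseModel):
    name: str

class PositionSketch(BaseModel):
    """Group container whose members live in `value`, as in the generated models."""

    name: str = "Position"
    value: Optional[List[SeriesSketch]] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        if self.value is not None:
            return self.value[val]
        raise KeyError("No value or data field to index from")

pos = PositionSketch(value=[SeriesSketch(name="eye"), SeriesSketch(name="paw")])
assert pos[0].name == "eye"  # indexing the group reaches its children
```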
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py
index 53128f3..4b214a9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
index f5f24cd..7c60b61 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_5_0.core_nwb_device import Device
from ...core.v2_5_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -112,37 +122,47 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -171,10 +191,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,24 +207,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -240,7 +270,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -255,7 +285,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -264,7 +294,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -285,16 +320,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -306,7 +350,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -321,7 +365,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -336,7 +380,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -352,14 +396,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -375,9 +428,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -396,7 +461,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -405,7 +470,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -414,6 +479,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -432,17 +506,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
index e55c2ea..fd5f403 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,31 +136,44 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field(
None,
description="""An index into a TimeSeries object.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py
index fb4442a..ae0ce47 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_5_0.core_nwb_epoch import TimeIntervals
from ...core.v2_5_0.core_nwb_misc import Units
from ...core.v2_5_0.core_nwb_device import Device
from ...core.v2_5_0.core_nwb_ogen import OptogeneticStimulusSite
@@ -24,6 +23,7 @@ from ...core.v2_5_0.core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
)
+from ...core.v2_5_0.core_nwb_epoch import TimeIntervals
from ...core.v2_5_0.core_nwb_base import (
NWBData,
NWBContainer,
@@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -105,9 +114,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -123,11 +130,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.5.0"] = Field(
+ "2.5.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.5.0", "ifabsent": "string(2.5.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -141,11 +149,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -183,19 +191,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -283,7 +281,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -311,10 +309,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -329,7 +327,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -342,11 +340,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -365,12 +363,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -383,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: Optional[NDArray[Any, np.float32]] = Field(
+ x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -392,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: Optional[NDArray[Any, np.float32]] = Field(
+ y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -401,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: Optional[NDArray[Any, np.float32]] = Field(
+ z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -410,7 +408,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: Optional[NDArray[Any, np.float32]] = Field(
+ imp: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@@ -419,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -428,7 +426,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: Optional[NDArray[Any, str]] = Field(
+ filtering: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@@ -440,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -449,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -458,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -467,7 +465,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -476,7 +474,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""",
json_schema_extra={
@@ -485,14 +483,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -502,7 +498,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -551,6 +547,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -576,7 +601,7 @@ class Subject(NWBContainer):
age: Optional[str] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -605,9 +630,10 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
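Each generated module now mixes the same `__getitem__` passthrough into its `ConfiguredBaseModel`, so indexing a model delegates to its `value` or `data` field when one is present. A self-contained sketch of the behavior (`ToyTimestamps` is invented for illustration and is not part of nwb_linkml):

```python
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try to get a value from "value" or "data" if we have it"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

class ToyTimestamps(ConfiguredBaseModel):
    value: Optional[List[float]] = None

ts = ToyTimestamps(value=[0.0, 0.5, 1.0])
assert ts[1] == 0.5          # delegates to ts.value[1]
assert ts[:2] == [0.0, 0.5]  # slices pass straight through

try:
    ToyTimestamps()[0]       # neither value nor data is set
except KeyError as e:
    print(e)
```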
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
index 6dfee22..4a7ccc2 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
@@ -5,6 +5,7 @@ from enum import Enum
import re
import sys
import numpy as np
+from ...core.v2_5_0.core_nwb_device import Device
from ...core.v2_5_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -113,32 +123,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,11 +191,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -187,36 +211,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -245,9 +283,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -262,40 +301,53 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None,
+ stimulus_description: Literal["N/A"] = Field(
+ "N/A",
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -324,31 +376,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -377,9 +443,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -416,31 +485,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -469,9 +552,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -492,11 +578,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -515,11 +604,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -538,11 +630,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -561,11 +654,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -584,11 +680,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -607,11 +706,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -630,11 +732,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -648,31 +751,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -701,9 +818,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -735,6 +853,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -747,7 +874,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -763,17 +890,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -793,17 +923,24 @@ class IntracellularElectrodesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular electrode related metadata."] = Field(
+ "Table for storing intracellular electrode related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular electrode related metadata.",
+ "ifabsent": "string(Table for storing intracellular electrode related metadata.)",
+ }
+ },
)
electrode: List[IntracellularElectrode] = Field(
..., description="""Column for storing the reference to the intracellular electrode."""
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -823,21 +960,33 @@ class IntracellularStimuliTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular stimulus related metadata."] = Field(
+ "Table for storing intracellular stimulus related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular stimulus related metadata.",
+ "ifabsent": "string(Table for storing intracellular stimulus related metadata.)",
+ }
+ },
)
stimulus: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded stimulus for the recording (rows).""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -857,21 +1006,33 @@ class IntracellularResponsesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular response related metadata."] = Field(
+ "Table for storing intracellular response related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular response related metadata.",
+ "ifabsent": "string(Table for storing intracellular response related metadata.)",
+ }
+ },
)
response: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded response for the recording (rows)""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -899,9 +1060,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
}
},
)
- description: Optional[str] = Field(
- None,
+ description: Literal[
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording."
+ ] = Field(
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording.",
description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": (
+ "A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and "
+ "for storing metadata about the intracellular recording."
+ ),
+ "ifabsent": (
+ "string(A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and for "
+ "storing metadata about the intracellular recording.)"
+ ),
+ }
+ },
)
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
@@ -912,14 +1091,14 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
responses: IntracellularResponsesTable = Field(
..., description="""Table for storing intracellular response related metadata."""
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -955,17 +1134,20 @@ class SimultaneousRecordingsTable(DynamicTable):
...,
description="""Index dataset for the recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -988,14 +1170,14 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
"linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"}
},
)
- table: Optional[IntracellularRecordingsTable] = Field(
- None,
+ table: IntracellularRecordingsTable = Field(
+ ...,
description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1031,10 +1213,15 @@ class SequentialRecordingsTable(DynamicTable):
...,
description="""Index dataset for the simultaneous_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- stimulus_type: NDArray[Any, str] = Field(
+ stimulus_type: VectorData[NDArray[Any, str]] = Field(
...,
description="""The type of stimulus used for the sequential recording.""",
json_schema_extra={
@@ -1043,14 +1230,12 @@ class SequentialRecordingsTable(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1076,14 +1261,14 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SimultaneousRecordingsTable] = Field(
- None,
+ table: SimultaneousRecordingsTable = Field(
+ ...,
description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1116,17 +1301,20 @@ class RepetitionsTable(DynamicTable):
...,
description="""Index dataset for the sequential_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1152,14 +1340,14 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SequentialRecordingsTable] = Field(
- None,
+ table: SequentialRecordingsTable = Field(
+ ...,
description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1194,17 +1382,20 @@ class ExperimentalConditionsTable(DynamicTable):
...,
description="""Index dataset for the repetitions column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1227,14 +1418,14 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
"linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"}
},
)
- table: Optional[RepetitionsTable] = Field(
- None,
+ table: RepetitionsTable = Field(
+ ...,
description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py
index 52ffddb..8dbbb44 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py
@@ -7,8 +7,15 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from ...core.v2_5_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_5_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
+from ...core.v2_5_0.core_nwb_base import (
+ Image,
+ TimeSeries,
+ TimeSeriesStartingTime,
+ TimeSeriesSync,
+ Images,
+)
metamodel_version = "None"
version = "2.5.0"
@@ -28,6 +35,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,15 +87,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +110,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +133,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -141,13 +157,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -160,21 +175,35 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -205,11 +234,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -224,14 +253,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -244,21 +281,35 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -286,24 +337,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -316,21 +366,35 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,26 +422,49 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.uint32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the image (using zero-indexing) in the linked Images object.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ indexed_images: Optional[Union[Images, str]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Images"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
index 3d0cd3b..5a7755b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,28 +312,47 @@ class DecompositionSeries(TimeSeries):
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
bands: DecompositionSeriesBands = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -337,11 +381,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -368,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -377,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -391,24 +436,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -432,7 +475,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -441,84 +489,115 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
+ waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
+ Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
+ }
+ },
+ )
)
waveforms_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
waveforms_index_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -541,14 +620,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py
index 618462b..f397977 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_5_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_5_0.core_nwb_device import Device
metamodel_version = "None"
version = "2.5.0"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
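
The new `site` and `device` slots follow the pattern used for every HDMF link in this refactor: the field accepts either the target model or a string, so a link can be carried as an unresolved path until the referenced object is available. A hedged sketch with hypothetical names:

```python
from typing import Union

from pydantic import BaseModel


class DeviceSketch(BaseModel):
    name: str


class StimulusSiteSketch(BaseModel):
    name: str
    # links render as Union[TargetType, str]: resolved object or path reference
    device: Union[DeviceSketch, str]


resolved = StimulusSiteSketch(name="site0", device=DeviceSketch(name="laser"))
unresolved = StimulusSiteSketch(name="site0", device="/general/devices/laser")
```
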
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
index 518c841..268b313 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_5_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_5_0.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_5_0.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.5.0"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,25 +124,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -145,21 +162,35 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -188,31 +219,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -239,7 +279,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -254,7 +294,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -269,7 +309,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -293,7 +333,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -304,7 +349,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -316,14 +366,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -346,10 +403,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -372,13 +427,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -401,14 +466,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -427,10 +506,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
class OpticalChannel(NWBContainer):
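> Editor's note: the three new `ImagingPlane*` subgroup classes each pin `name` with an `equals_string` literal and carry their array payload in a shape-constrained `value` slot. A minimal sketch of what validation should accept, assuming the generated v2_5_0 ophys module imports as laid out in the namespace diff below:

```python
import numpy as np
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_ophys import (
    ImagingPlaneGridSpacing,
    ImagingPlaneOriginCoords,
)

# `name` and `unit` both have ifabsent defaults, so they can be omitted
origin = ImagingPlaneOriginCoords(value=np.array([-1.2, -0.6, -2.0]))  # 3 x_y_z
spacing = ImagingPlaneGridSpacing(value=np.array([0.2, 0.2]))          # 2 x_y

assert origin.name == "origin_coords"  # fixed by equals_string
assert spacing.unit == "meters"        # ifabsent default
```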
@@ -444,9 +636,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -458,7 +648,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -481,6 +671,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -495,6 +694,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
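> Editor's note: link-typed slots (`imaging_plane`, `device`, `original`) are now generated as `Union[<Target>, str]` with a `source_type: link` annotation, so a reference can be stored either as the resolved container or as an unresolved string such as an HDF5 path. A rough sketch of both arms, with hypothetical names and values:

```python
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_ophys import (
    ImagingPlane,
    OpticalChannel,
)

channel = OpticalChannel(name="chan0", description="green channel", emission_lambda=520.0)
plane = ImagingPlane(
    name="plane0",
    excitation_lambda=920.0,
    indicator="GCaMP6f",
    location="V1, layer 2/3",
    optical_channel=[channel],
    device="/general/devices/2p_scope",  # str arm of Union[Device, str]
)
# a resolved Device instance would validate against the other arm of the Union
```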
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
index c8b182e..90bd8c1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
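> Editor's note: the `__getitem__` added to every `ConfiguredBaseModel` simply forwards indexing to whichever of `value` or `data` is populated. A self-contained toy sketch of the behavior (hypothetical model, not a generated class):

```python
from typing import Any, Optional, Union
from pydantic import BaseModel

class ToyModel(BaseModel):
    value: Optional[list] = None
    data: Optional[list] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # same resolution order as the generated method: value first, then data
        if self.value is not None:
            return self.value[val]
        elif self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

assert ToyModel(value=[1, 2, 3])[0] == 1
assert ToyModel(data=[4, 5, 6])[1:] == [5, 6]
```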
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py
index 4851576..ec00173 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py
@@ -61,6 +61,9 @@ from ...core.v2_5_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -151,10 +154,11 @@ from ...core.v2_5_0.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
)
@@ -178,6 +182,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
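> Editor's note: downstream code importing the old flattened names needs the corresponding rename; a sketch of the before/after, assuming the same module layout:

```python
# before:
# from nwb_linkml.models.pydantic.core.v2_5_0.namespace import (
#     NWBFileGeneralSourceScript, NWBFileGeneralExtracellularEphysElectrodes)
from nwb_linkml.models.pydantic.core.v2_5_0.namespace import (
    GeneralSourceScript,
    ExtracellularEphysElectrodes,
    NWBFileIntervals,  # newly exported
)
```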
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
index d890d13..624853e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
@@ -8,13 +8,29 @@ import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+ Annotated,
+ Type,
+)
from pydantic import (
BaseModel,
ConfigDict,
Field,
RootModel,
field_validator,
+ model_validator,
ValidationInfo,
BeforeValidator,
)
@@ -37,6 +53,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -57,12 +82,155 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+ """
+ Mixin class for TimeSeriesReferenceVectorData -
+ very simple, just indexing the given timeseries object.
+
+ These shouldn't have additional fields in them, just the three columns
+ for start index, count, and timeseries
+ """
+
+ idx_start: NDArray[Shape["*"], int]
+ count: NDArray[Shape["*"], int]
+ timeseries: NDArray
+
+ @model_validator(mode="after")
+ def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+ """
+ Each of the three indexing columns must be the same length for indexing to work.
+ """
+ assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+ f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+ f" timeseries: {len(self.timeseries)}"
+ )
+ return self
+
+ def __len__(self) -> int:
+ """Since we have ensured equal length, just return idx_start"""
+ return len(self.idx_start)
+
+ @overload
+ def _slice_helper(self, item: int) -> slice: ...
+
+ @overload
+ def _slice_helper(self, item: slice) -> List[slice]: ...
+
+ def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+ if isinstance(item, (int, np.integer)):
+ return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+ else:
+ starts = self.idx_start[item]
+ ends = starts + self.count[item]
+ return [slice(start, end) for start, end in zip(starts, ends)]
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+
+ if isinstance(item, (int, np.integer)):
+ return self.timeseries[item][self._slice_helper(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.idx_start)))
+ return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item]
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, must be an int, slice, or iterable"
+ )
+
+ def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+ if isinstance(key, (int, np.integer)):
+ self.timeseries[key][self._slice_helper(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.idx_start)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " target Timeseries object if you need more control"
+ )
+ for subitem, subvalue in zip(key, value):
+ self.timeseries[subitem][self._slice_helper(subitem)] = subvalue
+ else:
+ for subitem in key:
+ self.timeseries[subitem][self._slice_helper(subitem)] = value
+ else:
+ raise ValueError(
+ f"Dont know how to index with {key}, must be an int, slice, or iterable"
+ )
+
+
ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
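> Editor's note: row selection in `TimeSeriesReferenceVectorDataMixin` resolves to `timeseries[i][idx_start[i] : idx_start[i] + count[i]]`; `_slice_helper` only builds the inner slice. A toy sketch of that arithmetic with made-up numbers:

```python
import numpy as np

idx_start = np.array([0, 3])   # first sample of each referenced span
count = np.array([2, 4])       # number of samples in each span

# row 1 selects timeseries[1][3:7] -- a start and a count, not start/end indices
row = 1
sel = slice(idx_start[row], idx_start[row] + count[row])
assert (sel.start, sel.stop) == (3, 7)
```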
@@ -102,7 +270,7 @@ class NWBData(Data):
name: str = Field(...)
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
@@ -114,19 +282,23 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
- idx_start: np.int32 = Field(
+ idx_start: NDArray[Shape["*"], int] = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: np.int32 = Field(
+ count: NDArray[Shape["*"], int] = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+ ...,
+ description="""The TimeSeries that this index applies to""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -146,15 +318,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -169,8 +341,14 @@ class ImageReferences(NWBData):
)
name: str = Field(...)
- image: List[Image] = Field(
- ..., description="""Ordered dataset of references to Image objects."""
+ value: List[Image] = Field(
+ ...,
+ description="""Ordered dataset of references to Image objects.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "reference"}}
+ }
+ },
)
@@ -208,10 +386,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -221,12 +404,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -255,27 +438,29 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- offset: Optional[np.float32] = Field(
+ offset: Optional[float] = Field(
None,
description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -298,11 +483,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -327,7 +516,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -346,15 +535,18 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
order_of_images: Named[Optional[ImageReferences]] = Field(
None,
description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
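> Editor's note: the `Named[...]` annotation works because `_get_name` (wired in as a `BeforeValidator`) stamps the enclosing field's name onto the child before validation, which is how `order_of_images` above guarantees its `ImageReferences` is named after the slot. A compact standalone sketch mirroring (not importing) the generated pattern:

```python
from typing import Annotated
from pydantic import BaseModel, BeforeValidator, ValidationInfo

def _get_name(item, info: ValidationInfo):
    # mirror of the generated validator: copy the slot name onto the child
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item

class Child(BaseModel):
    name: str = ""

class Parent(BaseModel):
    order_of_images: Annotated[Child, BeforeValidator(_get_name)]

assert Parent(order_of_images=Child()).order_of_images.name == "order_of_images"
```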
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
index bc29452..508ddf8 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,15 +142,16 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, 1 x"], np.number],
- NDArray[Shape["* num_times, 2 x_y"], np.number],
- NDArray[Shape["* num_times, 3 x_y_z"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, 1 x"], float],
+ NDArray[Shape["* num_times, 2 x_y"], float],
+ NDArray[Shape["* num_times, 3 x_y_z"], float],
]
] = Field(None)
@@ -150,7 +165,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -165,7 +180,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -180,7 +195,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -195,7 +210,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -210,7 +225,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -225,7 +240,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -240,7 +255,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
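> Editor's note: the `children` → `value` rename puts grouped members where the new base-class `__getitem__` can find them, so group containers become directly indexable. A sketch assuming the generated v2_6_0_alpha behavior module and its defaults:

```python
from nwb_linkml.models.pydantic.core.v2_6_0_alpha.core_nwb_behavior import (
    Position,
    SpatialSeries,
    SpatialSeriesData,
)

ss = SpatialSeries(name="tracking", data=SpatialSeriesData())
position = Position(name="Position", value=[ss])

assert position[0] is ss  # base-class __getitem__ forwards into .value
```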
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
index deeda97..544b533 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
index 6dd5f69..2c241ec 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_6_0_alpha.core_nwb_device import Device
from ...core.v2_6_0_alpha.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -112,37 +122,47 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -171,10 +191,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,24 +207,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -240,7 +270,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -255,7 +285,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -264,7 +294,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -285,16 +320,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -306,7 +350,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -321,7 +365,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -336,7 +380,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -352,14 +396,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -375,9 +428,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -396,7 +461,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -405,7 +470,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -414,6 +479,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -432,17 +506,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
index 635e417..29ed69e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,31 +136,44 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field(
None,
description="""An index into a TimeSeries object.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
index f1e2354..db4b5ca 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals
from ...core.v2_6_0_alpha.core_nwb_misc import Units
from ...core.v2_6_0_alpha.core_nwb_device import Device
from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticStimulusSite
@@ -24,6 +23,7 @@ from ...core.v2_6_0_alpha.core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
)
+from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals
from ...core.v2_6_0_alpha.core_nwb_base import (
NWBData,
NWBContainer,
@@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -105,9 +114,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -123,11 +130,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.6.0"] = Field(
+ "2.6.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.6.0", "ifabsent": "string(2.6.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -141,11 +149,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -183,19 +191,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -283,7 +281,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -311,10 +309,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -329,7 +327,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -342,11 +340,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -365,12 +363,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -383,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: Optional[NDArray[Any, np.float32]] = Field(
+ x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -392,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: Optional[NDArray[Any, np.float32]] = Field(
+ y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -401,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: Optional[NDArray[Any, np.float32]] = Field(
+ z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -410,7 +408,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: Optional[NDArray[Any, np.float32]] = Field(
+ imp: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@@ -419,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -428,7 +426,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: Optional[NDArray[Any, str]] = Field(
+ filtering: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@@ -440,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -449,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -458,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -467,7 +465,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -476,7 +474,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""",
json_schema_extra={
@@ -485,14 +483,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -502,7 +498,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -551,6 +547,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -576,7 +601,7 @@ class Subject(NWBContainer):
age: Optional[SubjectAge] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -611,8 +636,9 @@ class SubjectAge(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "age", "ifabsent": "string(age)"}},
)
reference: Optional[str] = Field(
- None,
+ "birth",
description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(birth)"}},
)
value: str = Field(...)
@@ -623,10 +649,11 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
SubjectAge.model_rebuild()
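
The `nwb_version` change above illustrates the pattern used for every schema slot with a fixed value: `equals_string`/`ifabsent` metadata becomes a `Literal` type with a matching default, so the pinned value is filled in when absent and any other value fails validation. The fixed `unit` attributes ('volts', 'amperes', 'farads', ...) in the icephys diff below use the same pattern. A sketch (class name hypothetical):

```python
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class NWBFileStub(BaseModel):
    """Illustrative fragment of the generated NWBFile model."""

    # `equals_string`/`ifabsent` in the schema become a pinned
    # Literal with a matching default in the pydantic model.
    nwb_version: Literal["2.6.0"] = Field("2.6.0")


assert NWBFileStub().nwb_version == "2.6.0"
try:
    NWBFileStub(nwb_version="2.5.0")
except ValidationError:
    pass  # any value other than the pinned literal is rejected
```
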
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
index c1feffe..b18f673 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
@@ -5,6 +5,7 @@ from enum import Enum
import re
import sys
import numpy as np
+from ...core.v2_6_0_alpha.core_nwb_device import Device
from ...core.v2_6_0_alpha.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -113,32 +123,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,11 +191,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -187,36 +211,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -245,9 +283,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -262,40 +301,53 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None,
+ stimulus_description: Literal["N/A"] = Field(
+ "N/A",
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -324,31 +376,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -377,9 +443,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -416,31 +485,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -469,9 +552,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -492,11 +578,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -515,11 +604,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -538,11 +630,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -561,11 +654,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -584,11 +680,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -607,11 +706,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -630,11 +732,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -648,31 +751,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -701,9 +818,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
@@ -735,6 +853,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -747,7 +874,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -763,17 +890,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -793,17 +923,24 @@ class IntracellularElectrodesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular electrode related metadata."] = Field(
+ "Table for storing intracellular electrode related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular electrode related metadata.",
+ "ifabsent": "string(Table for storing intracellular electrode related metadata.)",
+ }
+ },
)
electrode: List[IntracellularElectrode] = Field(
..., description="""Column for storing the reference to the intracellular electrode."""
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -823,21 +960,33 @@ class IntracellularStimuliTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular stimulus related metadata."] = Field(
+ "Table for storing intracellular stimulus related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular stimulus related metadata.",
+ "ifabsent": "string(Table for storing intracellular stimulus related metadata.)",
+ }
+ },
)
stimulus: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded stimulus for the recording (rows).""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -857,21 +1006,33 @@ class IntracellularResponsesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular response related metadata."] = Field(
+ "Table for storing intracellular response related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular response related metadata.",
+ "ifabsent": "string(Table for storing intracellular response related metadata.)",
+ }
+ },
)
response: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded response for the recording (rows)""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -899,9 +1060,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
}
},
)
- description: Optional[str] = Field(
- None,
+ description: Literal[
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording."
+ ] = Field(
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording.",
description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": (
+ "A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and "
+ "for storing metadata about the intracellular recording."
+ ),
+ "ifabsent": (
+ "string(A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and for "
+ "storing metadata about the intracellular recording.)"
+ ),
+ }
+ },
)
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
@@ -912,14 +1091,14 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
responses: IntracellularResponsesTable = Field(
..., description="""Table for storing intracellular response related metadata."""
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -955,17 +1134,20 @@ class SimultaneousRecordingsTable(DynamicTable):
...,
description="""Index dataset for the recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -988,14 +1170,14 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
"linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"}
},
)
- table: Optional[IntracellularRecordingsTable] = Field(
- None,
+ table: IntracellularRecordingsTable = Field(
+ ...,
description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1031,10 +1213,15 @@ class SequentialRecordingsTable(DynamicTable):
...,
description="""Index dataset for the simultaneous_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- stimulus_type: NDArray[Any, str] = Field(
+ stimulus_type: VectorData[NDArray[Any, str]] = Field(
...,
description="""The type of stimulus used for the sequential recording.""",
json_schema_extra={
@@ -1043,14 +1230,12 @@ class SequentialRecordingsTable(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1076,14 +1261,14 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SimultaneousRecordingsTable] = Field(
- None,
+ table: SimultaneousRecordingsTable = Field(
+ ...,
description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1116,17 +1301,20 @@ class RepetitionsTable(DynamicTable):
...,
description="""Index dataset for the sequential_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1152,14 +1340,14 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SequentialRecordingsTable] = Field(
- None,
+ table: SequentialRecordingsTable = Field(
+ ...,
description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1194,17 +1382,20 @@ class ExperimentalConditionsTable(DynamicTable):
...,
description="""Index dataset for the repetitions column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1227,14 +1418,14 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
"linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"}
},
)
- table: Optional[RepetitionsTable] = Field(
- None,
+ table: RepetitionsTable = Field(
+ ...,
description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
index e1ee620..ed4d986 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
@@ -7,12 +7,14 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from ...core.v2_6_0_alpha.core_nwb_device import Device
from numpydantic import NDArray, Shape
from ...core.v2_6_0_alpha.core_nwb_base import (
Image,
TimeSeries,
TimeSeriesStartingTime,
TimeSeriesSync,
+ Images,
)
metamodel_version = "None"
@@ -33,6 +35,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,15 +87,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -99,15 +110,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -122,15 +133,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -146,13 +157,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -165,21 +175,35 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -210,11 +234,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -229,14 +253,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -249,21 +281,35 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -291,24 +337,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -321,21 +366,35 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -363,26 +422,49 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.uint32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the image (using zero-indexing) in the linked Images object.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ indexed_images: Optional[Union[Images, str]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Images"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
index a3861c2..66dad75 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,28 +312,47 @@ class DecompositionSeries(TimeSeries):
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
bands: DecompositionSeriesBands = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -337,11 +381,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -368,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -377,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -391,24 +436,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -432,7 +475,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -441,84 +489,115 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
+ waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
+ Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
+ }
+ },
+ )
)
waveforms_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
waveforms_index_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -541,14 +620,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
index 8b0a950..0371f5d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_6_0_alpha.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_6_0_alpha.core_nwb_device import Device
metamodel_version = "None"
version = "2.6.0-alpha"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -76,26 +86,40 @@ class OptogeneticSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.number] = Field(
+ data: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,11 +148,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
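
`OptogeneticSeries.site` and `OptogeneticStimulusSite.device` show the new link slots: an NWB link may arrive either as the resolved target object or as a string path to it, hence the `Union[..., str]` range and the `source_type: link` annotation. A toy sketch of the shape of such a slot (stand-in classes, not the generated ones):

```python
from typing import Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class StimulusSite(BaseModel):
    # link slot: either the resolved Device or a string path to one
    device: Union[Device, str]

print(StimulusSite(device=Device(name="laser-1")).device)      # resolved object
print(StimulusSite(device="/general/devices/laser-1").device)  # unresolved path
```
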
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
index de696cd..7c3e17b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_6_0_alpha.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_6_0_alpha.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_6_0_alpha.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.6.0-alpha"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -114,31 +124,37 @@ class OnePhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
- exposure_time: Optional[np.float32] = Field(
+ exposure_time: Optional[float] = Field(
None, description="""Exposure time of the sample; often the inverse of the frequency."""
)
- binning: Optional[np.uint8] = Field(
+ binning: Optional[int] = Field(
None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
)
- power: Optional[np.float32] = Field(
- None, description="""Power of the excitation in mW, if known."""
- )
- intensity: Optional[np.float32] = Field(
+ power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
+ intensity: Optional[float] = Field(
None, description="""Intensity of the excitation in mW/mm^2, if known."""
)
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -151,21 +167,35 @@ class OnePhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -193,25 +223,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -224,21 +261,35 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -267,31 +318,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -318,7 +378,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -333,7 +393,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -348,7 +408,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -372,7 +432,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -383,7 +448,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -395,14 +465,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -425,10 +502,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -451,13 +526,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -480,14 +565,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -506,10 +605,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
class OpticalChannel(NWBContainer):
@@ -523,9 +735,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -537,7 +747,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -560,6 +770,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -575,6 +794,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
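Two recurring changes in this file are worth reading together: container groups rename their `children` slot to `value`, and `ConfiguredBaseModel` gains a `__getitem__` that forwards indexing to `value` or `data`. A standalone sketch of how the two combine (toy classes mirroring the pattern, not the generated models):

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel


class ConfiguredBaseModel(BaseModel):
    # Same logic as the __getitem__ added in the diff above.
    def __getitem__(self, val: Union[int, slice]) -> Any:
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


class SeriesToy(ConfiguredBaseModel):
    name: str
    data: Optional[List[float]] = None


class ContainerToy(ConfiguredBaseModel):
    # Mirrors the `children` -> `value` rename on DfOverF, Fluorescence, etc.
    value: Optional[List[SeriesToy]] = None


s = SeriesToy(name="roi0", data=[0.1, 0.2, 0.3])
c = ContainerToy(value=[s])
assert c[0] is s      # container indexes into `value`
assert s[1] == 0.2    # series indexes into `data`
```

Because the slot is now called `value`, indexing a container like `DfOverF` reaches its member series directly instead of requiring attribute access on `children`.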
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
index fd2509b..d454105 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
index c6adbf3..c0c5da0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
@@ -62,6 +62,9 @@ from ...core.v2_6_0_alpha.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -152,10 +155,11 @@ from ...core.v2_6_0_alpha.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
SubjectAge,
@@ -180,6 +184,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py
index b76ba30..09a6f8b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py
@@ -8,13 +8,29 @@ import numpy as np
from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container
from numpydantic import NDArray, Shape
from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData, DynamicTable
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+ Annotated,
+ Type,
+)
from pydantic import (
BaseModel,
ConfigDict,
Field,
RootModel,
field_validator,
+ model_validator,
ValidationInfo,
BeforeValidator,
)
@@ -37,6 +53,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -57,12 +82,155 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+ """
+ Mixin class for TimeSeriesReferenceVectorData -
+ very simple, just indexing the given timeseries object.
+
+ These shouldn't have additional fields in them, just the three columns
+ for the start index, count, and timeseries
+ """
+
+ idx_start: NDArray[Shape["*"], int]
+ count: NDArray[Shape["*"], int]
+ timeseries: NDArray
+
+ @model_validator(mode="after")
+ def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+ """
+ Each of the three indexing columns must be the same length to work!
+ """
+ assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+ f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+ f" timeseries: {len(self.timeseries)}"
+ )
+ return self
+
+ def __len__(self) -> int:
+ """Since we have ensured equal length, just return idx_start"""
+ return len(self.idx_start)
+
+ @overload
+ def _slice_helper(self, item: int) -> slice: ...
+
+ @overload
+ def _slice_helper(self, item: slice) -> List[slice]: ...
+
+ def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+ if isinstance(item, (int, np.integer)):
+ return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+ else:
+ starts = self.idx_start[item]
+ ends = starts + self.count[item]
+ return [slice(start, end) for start, end in zip(starts, ends)]
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+
+ if isinstance(item, (int, np.integer)):
+ return self.timeseries[item][self._slice_helper(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.idx_start)))
+ return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item]
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, must be an int, slice, or iterable"
+ )
+
+ def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+ if self._index is not None:
+ raise NotImplementedError(
+ "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+ " never done in the core schema."
+ )
+ if isinstance(key, (int, np.integer)):
+ self.timeseries[key][self._slice_helper(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.idx_start)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " target Timeseries object if you need more control"
+ )
+ for subitem, subvalue in zip(key, value):
+ self.timeseries[subitem][self._slice_helper(subitem)] = subvalue
+ else:
+ for subitem in key:
+ self.timeseries[subitem][self._slice_helper(subitem)] = value
+ else:
+ raise ValueError(
+ f"Dont know how to index with {key}, must be an int, slice, or iterable"
+ )
+
+
ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -102,7 +270,7 @@ class NWBData(Data):
name: str = Field(...)
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
"""
Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
"""
@@ -114,19 +282,23 @@ class TimeSeriesReferenceVectorData(VectorData):
name: str = Field(
"timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
)
- idx_start: np.int32 = Field(
+ idx_start: NDArray[Shape["*"], int] = Field(
...,
description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- count: np.int32 = Field(
+ count: NDArray[Shape["*"], int] = Field(
...,
description="""Number of data samples available in this time series, during this epoch""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+ ...,
+ description="""The TimeSeries that this index applies to""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -146,15 +318,15 @@ class Image(NWBData):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -169,8 +341,14 @@ class ImageReferences(NWBData):
)
name: str = Field(...)
- image: List[Image] = Field(
- ..., description="""Ordered dataset of references to Image objects."""
+ value: List[Image] = Field(
+ ...,
+ description="""Ordered dataset of references to Image objects.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "reference"}}
+ }
+ },
)
@@ -208,10 +386,15 @@ class TimeSeries(NWBDataInterface):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
data: TimeSeriesData = Field(
...,
@@ -221,12 +404,12 @@ class TimeSeries(NWBDataInterface):
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -255,27 +438,29 @@ class TimeSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- conversion: Optional[np.float32] = Field(
- None,
+ conversion: Optional[float] = Field(
+ 1.0,
description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
)
- offset: Optional[np.float32] = Field(
+ offset: Optional[float] = Field(
None,
description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""",
)
- resolution: Optional[np.float32] = Field(
- None,
+ resolution: Optional[float] = Field(
+ -1.0,
description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
continuity: Optional[str] = Field(
None,
description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""",
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* num_times"], Any],
NDArray[Shape["* num_times, * num_dim2"], Any],
@@ -298,11 +483,15 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
},
)
- rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
- unit: Optional[str] = Field(
- None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+ rate: float = Field(..., description="""Sampling rate, in Hz.""")
+ unit: Literal["seconds"] = Field(
+ "seconds",
+ description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+ },
)
- value: np.float64 = Field(...)
+ value: float = Field(...)
class TimeSeriesSync(ConfiguredBaseModel):
@@ -327,7 +516,7 @@ class ProcessingModule(NWBContainer):
{"from_schema": "core.nwb.base", "tree_root": True}
)
- children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
+ value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field(
None,
json_schema_extra={
"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]}
@@ -346,15 +535,18 @@ class Images(NWBDataInterface):
)
name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}})
- description: Optional[str] = Field(
- None, description="""Description of this collection of images."""
- )
+ description: str = Field(..., description="""Description of this collection of images.""")
image: List[Image] = Field(..., description="""Images stored in this collection.""")
order_of_images: Named[Optional[ImageReferences]] = Field(
None,
description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
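The most substantial addition in this file is `TimeSeriesReferenceVectorDataMixin`, which resolves each row by slicing the referenced timeseries with `idx_start` and `count` (a start plus a span) rather than the end-index-per-cell scheme `VectorIndex` uses. A standalone sketch of the resolution logic with toy data:

```python
import numpy as np

# One row per reference: start index, sample count, and the referenced series.
idx_start = np.array([0, 5])
count = np.array([3, 2])
timeseries = [np.arange(10), np.arange(100, 110)]  # stand-ins for TimeSeries data


def resolve(row: int) -> np.ndarray:
    """Same arithmetic as _slice_helper in the mixin above."""
    span = slice(idx_start[row], idx_start[row] + count[row])
    return timeseries[row][span]


print(resolve(0))  # [0 1 2]    -> first 3 samples of the first series
print(resolve(1))  # [105 106]  -> samples 5..6 of the second series
```

Note how each row can point at a different series, which is why the mixin validates that all three columns have equal length.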
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py
index 991d017..780e83a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -84,21 +93,26 @@ class SpatialSeries(TimeSeries):
reference_frame: Optional[str] = Field(
None, description="""Description defining what exactly 'straight-ahead' means."""
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -128,15 +142,16 @@ class SpatialSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "meters",
description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, 1 x"], np.number],
- NDArray[Shape["* num_times, 2 x_y"], np.number],
- NDArray[Shape["* num_times, 3 x_y_z"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, 1 x"], float],
+ NDArray[Shape["* num_times, 2 x_y"], float],
+ NDArray[Shape["* num_times, 3 x_y_z"], float],
]
] = Field(None)
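The dtype changes in these hunks (`np.float64`, `np.uint8`, `np.number` becoming builtin `float`/`int`) loosen validation from one exact numpy dtype to any dtype of that kind. A sketch of the intended effect, assuming the models' `NDArray`/`Shape` come from numpydantic (the imports are outside this hunk):

```python
import numpy as np
from pydantic import BaseModel
from numpydantic import NDArray, Shape

class ToySeries(BaseModel):
    # Builtin `float` is meant to accept any float width, not just float64.
    timestamps: NDArray[Shape["* num_times"], float]

ToySeries(timestamps=np.arange(3, dtype=np.float32))
ToySeries(timestamps=np.arange(3, dtype=np.float64))
```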
@@ -150,7 +165,7 @@ class BehavioralEpochs(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[IntervalSeries]] = Field(
+ value: Optional[List[IntervalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}}
)
name: str = Field(...)
@@ -165,7 +180,7 @@ class BehavioralEvents(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -180,7 +195,7 @@ class BehavioralTimeSeries(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -195,7 +210,7 @@ class PupilTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[TimeSeries]] = Field(
+ value: Optional[List[TimeSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}}
)
name: str = Field(...)
@@ -210,7 +225,7 @@ class EyeTracking(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -225,7 +240,7 @@ class CompassDirection(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
@@ -240,7 +255,7 @@ class Position(NWBDataInterface):
{"from_schema": "core.nwb.behavior", "tree_root": True}
)
- children: Optional[List[SpatialSeries]] = Field(
+ value: Optional[List[SpatialSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}}
)
name: str = Field(...)
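The `__getitem__` added to every `ConfiguredBaseModel` in this file (and repeated in each module below) makes generated models directly indexable by delegating to a `value` or `data` field. A self-contained sketch of that behavior; `ToySeries` is illustrative, not a generated class:

```python
from typing import Any, List, Optional, Union
from pydantic import BaseModel

class ConfiguredBaseModel(BaseModel):
    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Try to get a value from 'value' or 'data' if we have it"""
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")

class ToySeries(ConfiguredBaseModel):
    data: Optional[List[float]] = None

series = ToySeries(data=[0.1, 0.2, 0.3])
assert series[0] == 0.1          # delegates to series.data[0]
assert series[1:] == [0.2, 0.3]  # slices pass through unchanged
```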
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py
index 0abc50a..24e2e67 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
index e11eaad..17de2b5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py
@@ -16,6 +16,7 @@ from pydantic import (
ValidationInfo,
BeforeValidator,
)
+from ...core.v2_7_0.core_nwb_device import Device
from ...core.v2_7_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -43,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +78,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -112,37 +122,47 @@ class ElectricalSeries(TimeSeries):
description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""",
)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_channels"], np.number],
- NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_channels"], float],
+ NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
] = Field(..., description="""Recorded voltage data.""")
electrodes: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -171,10 +191,10 @@ class SpikeEventSeries(ElectricalSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_events, * num_samples"], np.number],
- NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+ NDArray[Shape["* num_events, * num_samples"], float],
+ NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
] = Field(..., description="""Spike waveforms.""")
- timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+ timestamps: NDArray[Shape["* num_times"], float] = Field(
...,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -187,24 +207,34 @@ class SpikeEventSeries(ElectricalSeries):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+ channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
None,
description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -240,7 +270,7 @@ class FeatureExtraction(NWBDataInterface):
description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+ features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
...,
description="""Multi-dimensional array of features extracted from each event.""",
json_schema_extra={
@@ -255,7 +285,7 @@ class FeatureExtraction(NWBDataInterface):
}
},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of events that features correspond to (can be a link).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -264,7 +294,12 @@ class FeatureExtraction(NWBDataInterface):
...,
description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
@@ -285,16 +320,25 @@ class EventDetection(NWBDataInterface):
...,
description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
)
- source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+ source_idx: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Timestamps of events, in seconds.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
+ source_electricalseries: Union[ElectricalSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}],
+ }
+ },
+ )
class EventWaveform(NWBDataInterface):
@@ -306,7 +350,7 @@ class EventWaveform(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[SpikeEventSeries]] = Field(
+ value: Optional[List[SpikeEventSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}}
)
name: str = Field(...)
@@ -321,7 +365,7 @@ class FilteredEphys(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -336,7 +380,7 @@ class LFP(NWBDataInterface):
{"from_schema": "core.nwb.ecephys", "tree_root": True}
)
- children: Optional[List[ElectricalSeries]] = Field(
+ value: Optional[List[ElectricalSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}}
)
name: str = Field(...)
@@ -352,14 +396,23 @@ class ElectrodeGroup(NWBContainer):
)
name: str = Field(...)
- description: Optional[str] = Field(None, description="""Description of this electrode group.""")
- location: Optional[str] = Field(
- None,
+ description: str = Field(..., description="""Description of this electrode group.""")
+ location: str = Field(
+ ...,
description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""",
)
position: Optional[ElectrodeGroupPosition] = Field(
None, description="""stereotaxic or common framework coordinates"""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class ElectrodeGroupPosition(ConfiguredBaseModel):
@@ -375,9 +428,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
},
)
- x: Optional[np.float32] = Field(None, description="""x coordinate""")
- y: Optional[np.float32] = Field(None, description="""y coordinate""")
- z: Optional[np.float32] = Field(None, description="""z coordinate""")
+ x: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""x coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ y: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""y coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""z coordinate""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
class ClusterWaveforms(NWBDataInterface):
@@ -396,7 +461,7 @@ class ClusterWaveforms(NWBDataInterface):
waveform_filtering: str = Field(
..., description="""Filtering applied to data before generating mean/sd"""
)
- waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
json_schema_extra={
@@ -405,7 +470,7 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
- waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+ waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
...,
description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
json_schema_extra={
@@ -414,6 +479,15 @@ class ClusterWaveforms(NWBDataInterface):
}
},
)
+ clustering_interface: Union[Clustering, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Clustering"}, {"range": "string"}],
+ }
+ },
+ )
class Clustering(NWBDataInterface):
@@ -432,17 +506,17 @@ class Clustering(NWBDataInterface):
...,
description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
)
- num: NDArray[Shape["* num_events"], np.int32] = Field(
+ num: NDArray[Shape["* num_events"], int] = Field(
...,
description="""Cluster number of each event""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
)
- peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+ peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
...,
description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
)
- times: NDArray[Shape["* num_events"], np.float64] = Field(
+ times: NDArray[Shape["* num_events"], float] = Field(
...,
description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
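The new `device`, `source_electricalseries`, and `clustering_interface` fields above all follow one pattern: an hdmf `link` becomes a required `Union[TargetType, str]`, so callers can pass either the target model or a string reference such as an HDF5 path. A sketch of the pattern with stand-in classes:

```python
from typing import Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class ElectrodeGroup(BaseModel):
    name: str
    device: Union[Device, str]  # link slot: object or string reference

# Both forms validate:
by_object = ElectrodeGroup(name="shank0", device=Device(name="probe0"))
by_path = ElectrodeGroup(name="shank0", device="/general/devices/probe0")
```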
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py
index 2d844c9..1b07dac 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -62,7 +71,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -96,7 +105,7 @@ class TimeIntervals(DynamicTable):
)
name: str = Field(...)
- start_time: NDArray[Any, np.float32] = Field(
+ start_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Start time of epoch, in seconds.""",
json_schema_extra={
@@ -105,7 +114,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- stop_time: NDArray[Any, np.float32] = Field(
+ stop_time: VectorData[NDArray[Any, float]] = Field(
...,
description="""Stop time of epoch, in seconds.""",
json_schema_extra={
@@ -114,7 +123,7 @@ class TimeIntervals(DynamicTable):
}
},
)
- tags: Optional[NDArray[Any, str]] = Field(
+ tags: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""User-defined tags that identify or categorize events.""",
json_schema_extra={
@@ -127,31 +136,44 @@ class TimeIntervals(DynamicTable):
None,
description="""Index for tags.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field(
None,
description="""An index into a TimeSeries object.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
timeseries_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index for timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
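Beyond the `VectorData[...]` wrapping, the `TimeIntervals` hunk tightens two inherited `DynamicTable` slots: `colnames` goes from an optional string to a required `List[str]`, and `description` becomes required. A small sketch of what that means for validation, using a stand-in class rather than the generated model:

```python
from typing import List
from pydantic import BaseModel, ValidationError

class ToyDynamicTable(BaseModel):
    name: str
    colnames: List[str]   # was Optional[str] = None
    description: str      # was Optional[str] = None

try:
    ToyDynamicTable(name="trials")
except ValidationError as e:
    missing = sorted(err["loc"][0] for err in e.errors())
    assert missing == ["colnames", "description"]
```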
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py
index 47cb4ad..20ea663 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py
@@ -7,7 +7,6 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...core.v2_7_0.core_nwb_epoch import TimeIntervals
from ...core.v2_7_0.core_nwb_misc import Units
from ...core.v2_7_0.core_nwb_device import Device
from ...core.v2_7_0.core_nwb_ogen import OptogeneticStimulusSite
@@ -24,6 +23,7 @@ from ...core.v2_7_0.core_nwb_icephys import (
RepetitionsTable,
ExperimentalConditionsTable,
)
+from ...core.v2_7_0.core_nwb_epoch import TimeIntervals
from ...core.v2_7_0.core_nwb_base import (
NWBData,
NWBContainer,
@@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -105,9 +114,7 @@ class ScratchData(NWBData):
)
name: str = Field(...)
- notes: Optional[str] = Field(
- None, description="""Any notes the user has about the dataset being stored"""
- )
+ notes: str = Field(..., description="""Any notes the user has about the dataset being stored""")
class NWBFile(NWBContainer):
@@ -123,11 +130,12 @@ class NWBFile(NWBContainer):
"root",
json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}},
)
- nwb_version: Optional[str] = Field(
- None,
+ nwb_version: Literal["2.7.0"] = Field(
+ "2.7.0",
description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "2.7.0", "ifabsent": "string(2.7.0)"}},
)
- file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+ file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
...,
description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
json_schema_extra={
@@ -141,11 +149,11 @@ class NWBFile(NWBContainer):
session_description: str = Field(
..., description="""A description of the experimental session and data in the file."""
)
- session_start_time: np.datetime64 = Field(
+ session_start_time: datetime = Field(
...,
description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
)
- timestamps_reference_time: np.datetime64 = Field(
+ timestamps_reference_time: datetime = Field(
...,
description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
)
@@ -183,19 +191,9 @@ class NWBFile(NWBContainer):
...,
description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""",
)
- intervals: Optional[List[TimeIntervals]] = Field(
+ intervals: Optional[NWBFileIntervals] = Field(
None,
description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""",
- json_schema_extra={
- "linkml_meta": {
- "any_of": [
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- {"range": "TimeIntervals"},
- ]
- }
- },
)
units: Optional[Units] = Field(None, description="""Data about sorted spike units.""")
@@ -291,7 +289,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""",
)
- source_script: Optional[NWBFileGeneralSourceScript] = Field(
+ source_script: Optional[GeneralSourceScript] = Field(
None,
description="""Script file or link to public source code used to create this NWB file.""",
)
@@ -319,10 +317,10 @@ class NWBFileGeneral(ConfiguredBaseModel):
None,
description="""Information about the animal or person from which the data was measured.""",
)
- extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field(
+ extracellular_ephys: Optional[GeneralExtracellularEphys] = Field(
None, description="""Metadata related to extracellular electrophysiology."""
)
- intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field(
+ intracellular_ephys: Optional[GeneralIntracellularEphys] = Field(
None, description="""Metadata related to intracellular electrophysiology."""
)
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field(
@@ -337,7 +335,7 @@ class NWBFileGeneral(ConfiguredBaseModel):
)
-class NWBFileGeneralSourceScript(ConfiguredBaseModel):
+class GeneralSourceScript(ConfiguredBaseModel):
"""
Script file or link to public source code used to create this NWB file.
"""
@@ -350,11 +348,11 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"}
},
)
- file_name: Optional[str] = Field(None, description="""Name of script file.""")
+ file_name: str = Field(..., description="""Name of script file.""")
value: str = Field(...)
-class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
+class GeneralExtracellularEphys(ConfiguredBaseModel):
"""
Metadata related to extracellular electrophysiology.
"""
@@ -373,12 +371,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel):
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Physical group of electrodes."""
)
- electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field(
+ electrodes: Optional[ExtracellularEphysElectrodes] = Field(
None, description="""A table of all electrodes (i.e. channels) used for recording."""
)
-class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
+class ExtracellularEphysElectrodes(DynamicTable):
"""
A table of all electrodes (i.e. channels) used for recording.
"""
@@ -391,7 +389,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
"linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
},
)
- x: Optional[NDArray[Any, np.float32]] = Field(
+ x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate of the channel location in the brain (+x is posterior).""",
json_schema_extra={
@@ -400,7 +398,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- y: Optional[NDArray[Any, np.float32]] = Field(
+ y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate of the channel location in the brain (+y is inferior).""",
json_schema_extra={
@@ -409,7 +407,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- z: Optional[NDArray[Any, np.float32]] = Field(
+ z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate of the channel location in the brain (+z is right).""",
json_schema_extra={
@@ -418,7 +416,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- imp: Optional[NDArray[Any, np.float32]] = Field(
+ imp: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""Impedance of the channel, in ohms.""",
json_schema_extra={
@@ -427,7 +425,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- location: NDArray[Any, str] = Field(
+ location: VectorData[NDArray[Any, str]] = Field(
...,
description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
json_schema_extra={
@@ -436,7 +434,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- filtering: Optional[NDArray[Any, str]] = Field(
+ filtering: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of hardware filtering, including the filter name and frequency cutoffs.""",
json_schema_extra={
@@ -448,7 +446,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
group: List[ElectrodeGroup] = Field(
..., description="""Reference to the ElectrodeGroup this electrode is a part of."""
)
- group_name: NDArray[Any, str] = Field(
+ group_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the ElectrodeGroup this electrode is a part of.""",
json_schema_extra={
@@ -457,7 +455,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_x: Optional[NDArray[Any, np.float32]] = Field(
+ rel_x: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""x coordinate in electrode group""",
json_schema_extra={
@@ -466,7 +464,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_y: Optional[NDArray[Any, np.float32]] = Field(
+ rel_y: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""y coordinate in electrode group""",
json_schema_extra={
@@ -475,7 +473,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- rel_z: Optional[NDArray[Any, np.float32]] = Field(
+ rel_z: VectorData[Optional[NDArray[Any, float]]] = Field(
None,
description="""z coordinate in electrode group""",
json_schema_extra={
@@ -484,7 +482,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- reference: Optional[NDArray[Any, str]] = Field(
+ reference: VectorData[Optional[NDArray[Any, str]]] = Field(
None,
description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""",
json_schema_extra={
@@ -493,14 +491,12 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -510,7 +506,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
)
-class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
+class GeneralIntracellularEphys(ConfiguredBaseModel):
"""
Metadata related to intracellular electrophysiology.
"""
@@ -559,6 +555,35 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel):
)
+class NWBFileIntervals(ConfiguredBaseModel):
+ """
+ Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"})
+
+ name: Literal["intervals"] = Field(
+ "intervals",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"}
+ },
+ )
+ epochs: Optional[TimeIntervals] = Field(
+ None,
+ description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""",
+ )
+ trials: Optional[TimeIntervals] = Field(
+ None, description="""Repeated experimental events that have a logical grouping."""
+ )
+ invalid_times: Optional[TimeIntervals] = Field(
+ None, description="""Time intervals that should be removed from analysis."""
+ )
+ time_intervals: Optional[List[TimeIntervals]] = Field(
+ None,
+ description="""Optional additional table(s) for describing other experimental time intervals.""",
+ )
+
+
class LabMetaData(NWBContainer):
"""
Lab-specific meta-data.
@@ -584,7 +609,7 @@ class Subject(NWBContainer):
age: Optional[SubjectAge] = Field(
None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
)
- date_of_birth: Optional[np.datetime64] = Field(
+ date_of_birth: Optional[datetime] = Field(
None, description="""Date of birth of subject. Can be supplied instead of 'age'."""
)
description: Optional[str] = Field(
@@ -619,8 +644,9 @@ class SubjectAge(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "age", "ifabsent": "string(age)"}},
)
reference: Optional[str] = Field(
- None,
+ "birth",
description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(birth)"}},
)
value: str = Field(...)
@@ -631,10 +657,11 @@ ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
-NWBFileGeneralSourceScript.model_rebuild()
-NWBFileGeneralExtracellularEphys.model_rebuild()
-NWBFileGeneralExtracellularEphysElectrodes.model_rebuild()
-NWBFileGeneralIntracellularEphys.model_rebuild()
+GeneralSourceScript.model_rebuild()
+GeneralExtracellularEphys.model_rebuild()
+ExtracellularEphysElectrodes.model_rebuild()
+GeneralIntracellularEphys.model_rebuild()
+NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
SubjectAge.model_rebuild()
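The `NWBFileIntervals` class introduced above replaces the old `intervals: Optional[List[TimeIntervals]]` slot with a fixed-name group whose `epochs`/`trials`/`invalid_times` tables are addressable by name, plus a catch-all list for additional tables. A simplified sketch with stand-in types, not the generated imports:

```python
from typing import List, Literal, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel):
    description: str

class NWBFileIntervals(BaseModel):
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    time_intervals: Optional[List[TimeIntervals]] = None

intervals = NWBFileIntervals(trials=TimeIntervals(description="reach trials"))
assert intervals.name == "intervals"   # fixed by the schema
assert intervals.epochs is None        # named slots instead of a flat list
```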
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py
index b651db2..6993568 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py
@@ -5,6 +5,7 @@ from enum import Enum
import re
import sys
import numpy as np
+from ...core.v2_7_0.core_nwb_device import Device
from ...core.v2_7_0.core_nwb_base import (
TimeSeries,
TimeSeriesStartingTime,
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
@@ -113,32 +123,46 @@ class PatchClampSeries(TimeSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,11 +191,11 @@ class PatchClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ ...,
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
)
- array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times"], float]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
)
@@ -187,36 +211,50 @@ class CurrentClampSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
- bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
- capacitance_compensation: Optional[np.float32] = Field(
+ bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+ bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+ capacitance_compensation: Optional[float] = Field(
None, description="""Capacitance compensation, in farads."""
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -245,9 +283,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
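Schema-fixed attributes like `unit` here become `Literal` fields with the fixed string as default, mirroring the `equals_string` constraint instead of accepting any optional string. A minimal sketch:

```python
from typing import Literal
from pydantic import BaseModel, ValidationError

class ToyClampData(BaseModel):
    unit: Literal["volts"] = "volts"

assert ToyClampData().unit == "volts"
try:
    ToyClampData(unit="millivolts")
except ValidationError:
    pass  # anything other than the fixed value is rejected
```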
@@ -262,40 +301,53 @@ class IZeroClampSeries(CurrentClampSeries):
)
name: str = Field(...)
- stimulus_description: Optional[str] = Field(
- None,
+ stimulus_description: Literal["N/A"] = Field(
+ "N/A",
description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+ json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
)
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
- bridge_balance: np.float32 = Field(
- ..., description="""Bridge balance, in ohms, fixed to 0.0."""
- )
- capacitance_compensation: np.float32 = Field(
+ bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+ bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+ capacitance_compensation: float = Field(
..., description="""Capacitance compensation, in farads, fixed to 0.0."""
)
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -324,31 +376,45 @@ class CurrentClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -377,9 +443,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -416,31 +485,45 @@ class VoltageClampSeries(PatchClampSeries):
whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
Field(None, description="""Whole cell series resistance compensation, in ohms.""")
)
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -469,9 +552,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["amperes"] = Field(
+ "amperes",
description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+ },
)
value: Any = Field(...)
@@ -492,11 +578,14 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -515,11 +604,14 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -538,11 +630,12 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["hertz"] = Field(
+ "hertz",
description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -561,11 +654,14 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -584,11 +680,14 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["percent"] = Field(
+ "percent",
description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -607,11 +706,14 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["farads"] = Field(
+ "farads",
description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
+ },
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -630,11 +732,12 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
}
},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["ohms"] = Field(
+ "ohms",
description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}},
)
- value: np.float32 = Field(...)
+ value: float = Field(...)
class VoltageClampStimulusSeries(PatchClampSeries):
@@ -648,31 +751,45 @@ class VoltageClampStimulusSeries(PatchClampSeries):
name: str = Field(...)
data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""")
- stimulus_description: Optional[str] = Field(
- None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+ stimulus_description: str = Field(
+ ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
)
- sweep_number: Optional[np.uint32] = Field(
+ sweep_number: Optional[int] = Field(
None, description="""Sweep number, allows to group different PatchClampSeries together."""
)
- gain: Optional[np.float32] = Field(
+ gain: Optional[float] = Field(
None,
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ electrode: Union[IntracellularElectrode, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -701,9 +818,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: Literal["volts"] = Field(
+ "volts",
description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+ json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
)
value: Any = Field(...)
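
The recurring `unit` edits above replace a free-form `Optional[str]` with a `Literal` type plus a matching default, which is how the generator encodes LinkML's `equals_string` and `ifabsent` constraints in plain pydantic. A minimal, self-contained sketch of the resulting behavior (the class and field names below are illustrative, not from the generated models):

```python
from typing import Literal
from pydantic import BaseModel, Field, ValidationError

class UnitSketch(BaseModel):
    # Literal + default mirrors equals_string + ifabsent
    unit: Literal["volts"] = Field("volts")

UnitSketch()              # omitted: defaults to "volts" (ifabsent)
UnitSketch(unit="volts")  # the only value that validates (equals_string)
try:
    UnitSketch(unit="amperes")
except ValidationError:
    pass  # any other value is rejected
```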
@@ -735,6 +853,15 @@ class IntracellularElectrode(NWBContainer):
slice: Optional[str] = Field(
None, description="""Information about slice used for recording."""
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
class SweepTable(DynamicTable):
@@ -747,7 +874,7 @@ class SweepTable(DynamicTable):
)
name: str = Field(...)
- sweep_number: NDArray[Any, np.uint32] = Field(
+ sweep_number: VectorData[NDArray[Any, int]] = Field(
...,
description="""Sweep number of the PatchClampSeries in that row.""",
json_schema_extra={
@@ -763,17 +890,20 @@ class SweepTable(DynamicTable):
...,
description="""Index for series.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -793,17 +923,24 @@ class IntracellularElectrodesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular electrode related metadata."] = Field(
+ "Table for storing intracellular electrode related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular electrode related metadata.",
+ "ifabsent": "string(Table for storing intracellular electrode related metadata.)",
+ }
+ },
)
electrode: List[IntracellularElectrode] = Field(
..., description="""Column for storing the reference to the intracellular electrode."""
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -823,28 +960,45 @@ class IntracellularStimuliTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular stimulus related metadata."] = Field(
+ "Table for storing intracellular stimulus related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular stimulus related metadata.",
+ "ifabsent": "string(Table for storing intracellular stimulus related metadata.)",
+ }
+ },
)
stimulus: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded stimulus for the recording (rows).""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
stimulus_template: Named[Optional[TimeSeriesReferenceVectorData]] = Field(
None,
description="""Column storing the reference to the stimulus template for the recording (rows).""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -864,21 +1018,33 @@ class IntracellularResponsesTable(DynamicTable):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
+ description: Literal["Table for storing intracellular response related metadata."] = Field(
+ "Table for storing intracellular response related metadata.",
+ description="""Description of what is in this dynamic table.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": "Table for storing intracellular response related metadata.",
+ "ifabsent": "string(Table for storing intracellular response related metadata.)",
+ }
+ },
)
response: Named[TimeSeriesReferenceVectorData] = Field(
...,
description="""Column storing the reference to the recorded response for the recording (rows)""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -906,9 +1072,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
}
},
)
- description: Optional[str] = Field(
- None,
+ description: Literal[
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording."
+ ] = Field(
+ "A table to group together a stimulus and response from a single electrode and a single"
+ " simultaneous recording and for storing metadata about the intracellular recording.",
description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "equals_string": (
+ "A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and "
+ "for storing metadata about the intracellular recording."
+ ),
+ "ifabsent": (
+ "string(A table to group together a stimulus and response from a "
+ "single electrode and a single simultaneous recording and for "
+ "storing metadata about the intracellular recording.)"
+ ),
+ }
+ },
)
electrodes: IntracellularElectrodesTable = Field(
..., description="""Table for storing intracellular electrode related metadata."""
@@ -919,14 +1103,14 @@ class IntracellularRecordingsTable(AlignedDynamicTable):
responses: IntracellularResponsesTable = Field(
..., description="""Table for storing intracellular response related metadata."""
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- id: NDArray[Shape["* num_rows"], int] = Field(
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -962,17 +1146,20 @@ class SimultaneousRecordingsTable(DynamicTable):
...,
description="""Index dataset for the recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -995,14 +1182,14 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion):
"linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"}
},
)
- table: Optional[IntracellularRecordingsTable] = Field(
- None,
+ table: IntracellularRecordingsTable = Field(
+ ...,
description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1038,10 +1225,15 @@ class SequentialRecordingsTable(DynamicTable):
...,
description="""Index dataset for the simultaneous_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- stimulus_type: NDArray[Any, str] = Field(
+ stimulus_type: VectorData[NDArray[Any, str]] = Field(
...,
description="""The type of stimulus used for the sequential recording.""",
json_schema_extra={
@@ -1050,14 +1242,12 @@ class SequentialRecordingsTable(DynamicTable):
}
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1083,14 +1273,14 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SimultaneousRecordingsTable] = Field(
- None,
+ table: SimultaneousRecordingsTable = Field(
+ ...,
description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1123,17 +1313,20 @@ class RepetitionsTable(DynamicTable):
...,
description="""Index dataset for the sequential_recordings column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1159,14 +1352,14 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion):
}
},
)
- table: Optional[SequentialRecordingsTable] = Field(
- None,
+ table: SequentialRecordingsTable = Field(
+ ...,
description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -1201,17 +1394,20 @@ class ExperimentalConditionsTable(DynamicTable):
...,
description="""Index dataset for the repetitions column.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -1234,14 +1430,14 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
"linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"}
},
)
- table: Optional[RepetitionsTable] = Field(
- None,
+ table: RepetitionsTable = Field(
+ ...,
description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py
index 7377214..c2b5aff 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py
@@ -7,8 +7,15 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from ...core.v2_7_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_7_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync
+from ...core.v2_7_0.core_nwb_base import (
+ Image,
+ TimeSeries,
+ TimeSeriesStartingTime,
+ TimeSeriesSync,
+ Images,
+)
metamodel_version = "None"
version = "2.7.0"
@@ -28,6 +35,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
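
The `__getitem__` added to `ConfiguredBaseModel` here (and mirrored in `core_nwb_misc.py` below) makes any generated model that keeps its payload in a `value` or `data` field directly indexable. A self-contained sketch of the same pass-through outside the generated module:

```python
from typing import Any, Optional, Union
from pydantic import BaseModel

class IndexableSketch(BaseModel):
    value: Optional[Any] = None
    data: Optional[Any] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # Same precedence as the generated method: `value` first, then `data`
        if self.value is not None:
            return self.value[val]
        elif self.data is not None:
            return self.data[val]
        raise KeyError("No value or data field to index from")

wrapped = IndexableSketch(value=[10, 20, 30])
assert wrapped[1] == 20
assert wrapped[0:2] == [10, 20]
```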
@@ -71,15 +87,15 @@ class GrayscaleImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -94,15 +110,15 @@ class RGBImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -117,15 +133,15 @@ class RGBAImage(Image):
)
name: str = Field(...)
- resolution: Optional[np.float32] = Field(
+ resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter."""
)
description: Optional[str] = Field(None, description="""Description of the image.""")
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* x, * y"], np.number],
- NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
- NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+ NDArray[Shape["* x, * y"], float],
+ NDArray[Shape["* x, * y, 3 r_g_b"], float],
+ NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
@@ -141,13 +157,12 @@ class ImageSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -160,21 +175,35 @@ class ImageSeries(TimeSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -205,11 +234,11 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
},
)
- starting_frame: Optional[np.int32] = Field(
- None,
+ starting_frame: List[int] = Field(
+ ...,
description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
)
- array: Optional[NDArray[Shape["* num_files"], str]] = Field(
+ value: Optional[NDArray[Shape["* num_files"], str]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}}
)
@@ -224,14 +253,22 @@ class ImageMaskSeries(ImageSeries):
)
name: str = Field(...)
+ masked_imageseries: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -244,21 +281,35 @@ class ImageMaskSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -286,24 +337,23 @@ class OpticalSeries(ImageSeries):
)
name: str = Field(...)
- distance: Optional[np.float32] = Field(
+ distance: Optional[float] = Field(
None, description="""Distance from camera/monitor to target/eye."""
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float],
+ NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float],
] = Field(..., description="""Images presented to subject, either grayscale or RGB""")
orientation: Optional[str] = Field(
None,
description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -316,21 +366,35 @@ class OpticalSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,26 +422,49 @@ class IndexSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.uint32] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Index of the image (using zero-indexing) in the linked Images object.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ indexed_timeseries: Optional[Union[ImageSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ indexed_images: Optional[Union[Images, str]] = Field(
+ None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Images"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py
index 0cfd65b..a30d3e0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -68,7 +77,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
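
`_get_name` is the validator behind the `Named[...]` annotation used on columns like `spike_times_index`: run as a before-validator, it copies the parent slot's name onto the child model. A self-contained sketch of that mechanism (the model names here are illustrative):

```python
from typing import Annotated, Optional, Union
from pydantic import BaseModel, BeforeValidator, ValidationInfo

def _get_name_sketch(item: Union[BaseModel, dict], info: ValidationInfo):
    # Copy the slot name onto the value before it is validated
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item

class VectorIndexSketch(BaseModel):
    name: Optional[str] = None

class TableSketch(BaseModel):
    spike_times_index: Annotated[VectorIndexSketch, BeforeValidator(_get_name_sketch)]

t = TableSketch(spike_times_index=VectorIndexSketch())
assert t.spike_times_index.name == "spike_times_index"
```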
@@ -120,21 +129,26 @@ class AbstractFeatureSeries(TimeSeries):
description="""Description of the features represented in TimeSeries::data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,13 +178,14 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
unit: Optional[str] = Field(
- None,
+ "see 'feature_units'",
description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}},
)
- array: Optional[
+ value: Optional[
Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_features"], np.number],
+ NDArray[Shape["* num_times"], float],
+ NDArray[Shape["* num_times, * num_features"], float],
]
] = Field(None)
@@ -190,21 +205,26 @@ class AnnotationSeries(TimeSeries):
description="""Annotations made during an experiment.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,26 +252,31 @@ class IntervalSeries(TimeSeries):
)
name: str = Field(...)
- data: NDArray[Shape["* num_times"], np.int8] = Field(
+ data: NDArray[Shape["* num_times"], int] = Field(
...,
description="""Use values >0 if interval started, <0 if interval ended.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -287,28 +312,47 @@ class DecompositionSeries(TimeSeries):
None,
description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
bands: DecompositionSeriesBands = Field(
...,
description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ source_timeseries: Optional[Union[TimeSeries, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "TimeSeries"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -337,11 +381,12 @@ class DecompositionSeriesData(ConfiguredBaseModel):
"data",
json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
)
- unit: Optional[str] = Field(
- None,
+ unit: str = Field(
+ "no unit",
description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}},
)
- array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+ value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
@@ -368,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable):
"bands",
json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}},
)
- band_name: NDArray[Any, str] = Field(
+ band_name: VectorData[NDArray[Any, str]] = Field(
...,
description="""Name of the band, e.g. theta.""",
json_schema_extra={
@@ -377,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+ band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field(
...,
description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""",
json_schema_extra={
@@ -391,24 +436,22 @@ class DecompositionSeriesBands(DynamicTable):
}
},
)
- band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The mean Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+ band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field(
...,
description="""The standard deviation of Gaussian filters, in Hz.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -432,7 +475,12 @@ class Units(DynamicTable):
None,
description="""Index into the spike_times dataset.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
spike_times: Optional[UnitsSpikeTimes] = Field(
@@ -441,84 +489,115 @@ class Units(DynamicTable):
obs_intervals_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the obs_intervals dataset.""",
- json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
- },
- )
- obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
- None,
- description="""Observation intervals for each unit.""",
json_schema_extra={
"linkml_meta": {
- "array": {
- "dimensions": [
- {"alias": "num_intervals"},
- {"alias": "start_end", "exact_cardinality": 2},
- ]
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
}
}
},
)
+ obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
+ Field(
+ None,
+ description="""Observation intervals for each unit.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {
+ "dimensions": [
+ {"alias": "num_intervals"},
+ {"alias": "start_end", "exact_cardinality": 2},
+ ]
+ }
+ }
+ },
+ )
+ )
electrodes_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into electrodes.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrodes: Named[Optional[DynamicTableRegion]] = Field(
None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from."""
)
- waveform_mean: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_mean: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform mean for each spike unit.""")
- waveform_sd: Optional[
- Union[
- NDArray[Shape["* num_units, * num_samples"], np.float32],
- NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+ waveform_sd: VectorData[
+ Optional[
+ Union[
+ NDArray[Shape["* num_units, * num_samples"], float],
+ NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
+ ]
]
] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
- waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field(
- None,
- description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
- json_schema_extra={
- "linkml_meta": {
- "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
- }
- },
+ waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = (
+ Field(
+ None,
+ description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""",
+ json_schema_extra={
+ "linkml_meta": {
+ "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]}
+ }
+ },
+ )
)
waveforms_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
waveforms_index_index: Named[Optional[VectorIndex]] = Field(
None,
description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -541,14 +620,12 @@ class UnitsSpikeTimes(VectorData):
"linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
},
)
- resolution: Optional[np.float64] = Field(
+ resolution: Optional[float] = Field(
None,
description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py
index 46d16a7..881aea0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py
@@ -14,6 +14,7 @@ from ...core.v2_7_0.core_nwb_base import (
TimeSeriesSync,
NWBContainer,
)
+from ...core.v2_7_0.core_nwb_device import Device
metamodel_version = "None"
version = "2.7.0"
@@ -33,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
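The `__getitem__` added to each module's `ConfiguredBaseModel` makes generated containers directly indexable, forwarding to whichever of `value` or `data` is populated. A minimal sketch of the behavior, with `Probe` as a hypothetical stand-in for a generated class:

```python
from typing import Any, Optional, Union

from pydantic import BaseModel


class Probe(BaseModel):
    """Hypothetical model with a ``data`` field, like a generated TimeSeries."""

    data: Optional[list] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # same logic as the generated method above
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        raise KeyError("No value or data field to index from")


probe = Probe(data=[1, 2, 3])
assert probe[0] == 1        # forwards to probe.data[0]
assert probe[1:] == [2, 3]  # slices forward too
```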
@@ -77,27 +87,40 @@ class OptogeneticSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(
...,
description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ site: Union[OptogeneticStimulusSite, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -126,11 +149,20 @@ class OptogeneticStimulusSite(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description of stimulation site.""")
- excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
location: str = Field(
...,
description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
)
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
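All the new link-typed slots (`site` and `device` here, `imaging_plane` and `original` below) share one shape: a `Union[<target>, str]`, so a field can hold either the referenced object inline or a string reference. A rough sketch of the pattern with toy models (names hypothetical; how string references get resolved is out of scope here):

```python
from typing import Union

from pydantic import BaseModel


class Device(BaseModel):
    name: str


class StimulusSite(BaseModel):
    # same Union[<target>, str] shape as the generated link slots
    device: Union[Device, str]


StimulusSite(device=Device(name="laser-1"))      # inline object
StimulusSite(device="/general/devices/laser-1")  # string reference
```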
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py
index bfd5c4e..e17fe42 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py
@@ -21,8 +21,8 @@ from ...hdmf_common.v1_8_0.hdmf_common_table import (
VectorIndex,
VectorData,
)
+from ...core.v2_7_0.core_nwb_device import Device
from numpydantic import NDArray, Shape
-from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...core.v2_7_0.core_nwb_base import (
TimeSeriesStartingTime,
TimeSeriesSync,
@@ -30,6 +30,7 @@ from ...core.v2_7_0.core_nwb_base import (
NWBDataInterface,
NWBContainer,
)
+from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile
metamodel_version = "None"
version = "2.7.0"
@@ -49,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -74,7 +84,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
"""Get the name of the slot that refers to this object"""
- assert isinstance(item, (BaseModel, dict))
+ assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name
if isinstance(item, BaseModel):
item.name = name
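`_get_name` is the validator behind the `Named[...]` annotations used throughout these models: it stamps the enclosing field's name onto the value being validated. A hedged sketch of how such an alias could be wired up; the actual alias lives elsewhere in the generated module, so treat the wiring as an assumption:

```python
from typing import Annotated, TypeVar, Union

from pydantic import BaseModel, BeforeValidator, ValidationInfo

ModelType = TypeVar("ModelType")


def _get_name(item: Union[BaseModel, dict], info: ValidationInfo) -> Union[BaseModel, dict]:
    """Name the value after the field it was assigned to."""
    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
    if isinstance(item, BaseModel):
        item.name = info.field_name
    else:
        item["name"] = info.field_name
    return item


# e.g. Named[VectorIndex] validates a VectorIndex and names it
# "spike_times_index" when assigned to that field
Named = Annotated[ModelType, BeforeValidator(_get_name)]
```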
@@ -114,31 +124,37 @@ class OnePhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
- exposure_time: Optional[np.float32] = Field(
+ exposure_time: Optional[float] = Field(
None, description="""Exposure time of the sample; often the inverse of the frequency."""
)
- binning: Optional[np.uint8] = Field(
+ binning: Optional[int] = Field(
None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc."""
)
- power: Optional[np.float32] = Field(
- None, description="""Power of the excitation in mW, if known."""
- )
- intensity: Optional[np.float32] = Field(
+ power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""")
+ intensity: Optional[float] = Field(
None, description="""Intensity of the excitation in mW/mm^2, if known."""
)
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -151,21 +167,35 @@ class OnePhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -193,25 +223,32 @@ class TwoPhotonSeries(ImageSeries):
)
name: str = Field(...)
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
- scan_line_rate: Optional[np.float32] = Field(
+ pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+ scan_line_rate: Optional[float] = Field(
None,
description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
)
field_of_view: Optional[
Union[
- NDArray[Shape["2 width_height"], np.float32],
- NDArray[Shape["3 width_height_depth"], np.float32],
+ NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
]
] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
data: Union[
- NDArray[Shape["* frame, * x, * y"], np.number],
- NDArray[Shape["* frame, * x, * y, * z"], np.number],
+ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float]
] = Field(
...,
description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""",
)
- dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+ dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
None,
description="""Number of pixels on x, y, (and z) axes.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -224,21 +261,35 @@ class TwoPhotonSeries(ImageSeries):
None,
description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
- comments: Optional[str] = Field(
+ device: Optional[Union[Device, str]] = Field(
None,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
+ comments: Optional[str] = Field(
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -267,31 +318,40 @@ class RoiResponseSeries(TimeSeries):
name: str = Field(...)
data: Union[
- NDArray[Shape["* num_times"], np.number],
- NDArray[Shape["* num_times, * num_rois"], np.number],
+ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
] = Field(..., description="""Signals from ROIs.""")
rois: Named[DynamicTableRegion] = Field(
...,
description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
- description: Optional[str] = Field(None, description="""Description of the time series.""")
+ description: Optional[str] = Field(
+ "no description",
+ description="""Description of the time series.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+ )
comments: Optional[str] = Field(
- None,
+ "no comments",
description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
)
starting_time: Optional[TimeSeriesStartingTime] = Field(
None,
description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
)
- timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+ timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
None,
description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
)
- control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+ control: Optional[NDArray[Shape["* num_times"], int]] = Field(
None,
description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -318,7 +378,7 @@ class DfOverF(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -333,7 +393,7 @@ class Fluorescence(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[RoiResponseSeries]] = Field(
+ value: Optional[List[RoiResponseSeries]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}}
)
name: str = Field(...)
@@ -348,7 +408,7 @@ class ImageSegmentation(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[PlaneSegmentation]] = Field(
+ value: Optional[List[PlaneSegmentation]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}}
)
name: str = Field(...)
@@ -372,7 +432,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into pixel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
pixel_mask: Optional[PlaneSegmentationPixelMask] = Field(
@@ -383,7 +448,12 @@ class PlaneSegmentation(DynamicTable):
None,
description="""Index into voxel_mask.""",
json_schema_extra={
- "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
+ "linkml_meta": {
+ "annotations": {
+ "named": {"tag": "named", "value": True},
+ "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
+ }
+ }
},
)
voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field(
@@ -395,14 +465,21 @@ class PlaneSegmentation(DynamicTable):
description="""Image stacks that the segmentation masks apply to.""",
json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}},
)
- colnames: Optional[str] = Field(
- None,
+ imaging_plane: Union[ImagingPlane, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImagingPlane"}, {"range": "string"}],
+ }
+ },
+ )
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -425,10 +502,8 @@ class PlaneSegmentationImageMask(VectorData):
"linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"}
},
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -451,13 +526,23 @@ class PlaneSegmentationPixelMask(VectorData):
"linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Pixel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the pixel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -480,14 +565,28 @@ class PlaneSegmentationVoxelMask(VectorData):
"linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
},
)
- x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""")
- y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""")
- z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""")
- weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""")
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
+ x: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel x-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- array: Optional[
+ y: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel y-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ z: Optional[NDArray[Shape["*"], int]] = Field(
+ None,
+ description="""Voxel z-coordinate.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ weight: Optional[NDArray[Shape["*"], float]] = Field(
+ None,
+ description="""Weight of the voxel.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -506,10 +605,123 @@ class ImagingPlane(NWBContainer):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[OpticalChannel]] = Field(
- None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}}
- )
name: str = Field(...)
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
+ excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+ imaging_rate: Optional[float] = Field(
+ None,
+ description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""",
+ )
+ indicator: str = Field(..., description="""Calcium indicator.""")
+ location: str = Field(
+ ...,
+ description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
+ )
+ manifold: Optional[ImagingPlaneManifold] = Field(
+ None,
+ description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""",
+ )
+ origin_coords: Optional[ImagingPlaneOriginCoords] = Field(
+ None,
+ description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""",
+ )
+ grid_spacing: Optional[ImagingPlaneGridSpacing] = Field(
+ None,
+ description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""",
+ )
+ reference_frame: Optional[str] = Field(
+ None,
+ description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""",
+ )
+ optical_channel: List[OpticalChannel] = Field(
+ ..., description="""An optical channel used to record from an imaging plane."""
+ )
+ device: Union[Device, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "Device"}, {"range": "string"}],
+ }
+ },
+ )
+
+
+class ImagingPlaneManifold(ConfiguredBaseModel):
+ """
+ DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["manifold"] = Field(
+ "manifold",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
+ },
+ )
+ conversion: Optional[float] = Field(
+ 1.0,
+ description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
+ )
+ unit: Optional[str] = Field(
+ "meters",
+ description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* height, * width, 3 x_y_z"], float],
+ NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
+ ]
+ ] = Field(None)
+
+
+class ImagingPlaneOriginCoords(ConfiguredBaseModel):
+ """
+ Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["origin_coords"] = Field(
+ "origin_coords",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for origin_coords. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
+
+
+class ImagingPlaneGridSpacing(ConfiguredBaseModel):
+ """
+ Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"})
+
+ name: Literal["grid_spacing"] = Field(
+ "grid_spacing",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"}
+ },
+ )
+ unit: str = Field(
+ "meters",
+ description="""Measurement units for grid_spacing. The default value is 'meters'.""",
+ json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}},
+ )
+ value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = (
+ Field(None)
+ )
class OpticalChannel(NWBContainer):
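The new `ImagingPlane` subgroup classes can be exercised with the worked example already given in the `reference_frame` docstring. A sketch, assuming numpydantic accepts a plain 3-vector for `value`:

```python
import numpy as np

origin = ImagingPlaneOriginCoords(
    value=np.array([-1.2, -0.6, -2.0]),  # mm relative to bregma, per the docstring example
    unit="millimeters",
)
spacing = ImagingPlaneGridSpacing(
    value=np.array([0.2, 0.2, 0.5]),  # mm between pixels/voxels in x, y, z
    unit="millimeters",
)
# `name` is pinned by equals_string/ifabsent, so these serialize as the
# "origin_coords" and "grid_spacing" subgroups without passing a name
```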
@@ -523,9 +735,7 @@ class OpticalChannel(NWBContainer):
name: str = Field(...)
description: str = Field(..., description="""Description or other notes about the channel.""")
- emission_lambda: np.float32 = Field(
- ..., description="""Emission wavelength for channel, in nm."""
- )
+ emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
class MotionCorrection(NWBDataInterface):
@@ -537,7 +747,7 @@ class MotionCorrection(NWBDataInterface):
{"from_schema": "core.nwb.ophys", "tree_root": True}
)
- children: Optional[List[CorrectedImageStack]] = Field(
+ value: Optional[List[CorrectedImageStack]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}}
)
name: str = Field(...)
@@ -560,6 +770,15 @@ class CorrectedImageStack(NWBDataInterface):
...,
description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""",
)
+ original: Union[ImageSeries, str] = Field(
+ ...,
+ json_schema_extra={
+ "linkml_meta": {
+ "annotations": {"source_type": {"tag": "source_type", "value": "link"}},
+ "any_of": [{"range": "ImageSeries"}, {"range": "string"}],
+ }
+ },
+ )
# Model rebuild
@@ -575,6 +794,9 @@ PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMask.model_rebuild()
PlaneSegmentationVoxelMask.model_rebuild()
ImagingPlane.model_rebuild()
+ImagingPlaneManifold.model_rebuild()
+ImagingPlaneOriginCoords.model_rebuild()
+ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
index f65ed6c..1d8b514 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -127,17 +136,13 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -161,17 +166,13 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -195,17 +196,13 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -229,17 +226,13 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel):
}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- unit: Optional[str] = Field(
- None, description="""Unit that axis data is stored in (e.g., degrees)."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -263,24 +256,18 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- focal_depth: Optional[np.float32] = Field(
- None, description="""Focal depth offset, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ focal_depth: float = Field(..., description="""Focal depth offset, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -301,14 +288,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel):
"linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"}
},
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -332,21 +317,17 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
}
},
)
- bits_per_pixel: Optional[np.int32] = Field(
- None,
+ bits_per_pixel: int = Field(
+ ...,
description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""",
)
- dimension: Optional[np.int32] = Field(
- None,
+ dimension: List[int] = Field(
+ ...,
description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
)
- field_of_view: Optional[np.float32] = Field(
- None, description="""Size of viewing area, in meters."""
- )
- format: Optional[str] = Field(
- None, description="""Format of image. Right now only 'raw' is supported."""
- )
- array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field(
+ field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
+ format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
+ value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py
index b347afb..c6c0f39 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py
@@ -63,6 +63,9 @@ from ...core.v2_7_0.core_nwb_ophys import (
PlaneSegmentationPixelMask,
PlaneSegmentationVoxelMask,
ImagingPlane,
+ ImagingPlaneManifold,
+ ImagingPlaneOriginCoords,
+ ImagingPlaneGridSpacing,
OpticalChannel,
MotionCorrection,
CorrectedImageStack,
@@ -153,10 +156,11 @@ from ...core.v2_7_0.core_nwb_file import (
NWBFile,
NWBFileStimulus,
NWBFileGeneral,
- NWBFileGeneralSourceScript,
- NWBFileGeneralExtracellularEphys,
- NWBFileGeneralExtracellularEphysElectrodes,
- NWBFileGeneralIntracellularEphys,
+ GeneralSourceScript,
+ GeneralExtracellularEphys,
+ ExtracellularEphysElectrodes,
+ GeneralIntracellularEphys,
+ NWBFileIntervals,
LabMetaData,
Subject,
SubjectAge,
@@ -181,6 +185,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
index 2ba56a5..8cd8423 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
@@ -7,6 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.1.0"
@@ -26,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -44,6 +54,7 @@ class LinkMLMeta(RootModel):
return key in self.root
+NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -68,7 +79,13 @@ class CSRMatrix(ConfiguredBaseModel):
)
name: str = Field(...)
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""")
+ shape: NDArray[Shape["2 null"], int] = Field(
+ ...,
+ description="""the shape of this sparse matrix""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
+ },
+ )
indices: CSRMatrixIndices = Field(..., description="""column indices""")
indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
data: CSRMatrixData = Field(..., description="""values in the matrix""")
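`shape` becomes a required two-element array rather than an optional bare `int`, matching the standard CSR layout. For orientation, a small worked example with invented values:

```python
from scipy.sparse import csr_matrix

# M = [[1, 0, 2],
#      [0, 0, 3]]
shape = [2, 3]       # rows, columns
data = [1, 2, 3]     # nonzero values in row-major order
indices = [0, 2, 2]  # column index of each nonzero value
indptr = [0, 2, 3]   # row i's values sit in data[indptr[i]:indptr[i + 1]]

m = csr_matrix((data, indices, indptr), shape=tuple(shape))
assert m.toarray().tolist() == [[1, 0, 2], [0, 0, 3]]
```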
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
index ffd4424..f571cc5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
@@ -4,10 +4,34 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
metamodel_version = "None"
version = "1.1.0"
@@ -27,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -46,6 +79,709 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
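+    # Worked sketch of ``_slice`` (values hypothetical): with
+    # ``self.value == [3, 5, 9]``, ``_slice(0) == slice(0, 3)``,
+    # ``_slice(1) == slice(3, 5)``, and ``_slice(2) == slice(5, 9)``, so row
+    # ``i`` of the ragged column is ``target.value[_slice(i)]``.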
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+            Even though we correct the indexing logic from HDMF, so that the
+            *data* is what the API provides when one accesses ``table.data``
+            (rather than ``table.data_index``, as hdmf does), we still set
+            values on the target here (rather than on the index) to be
+            consistent. To modify the index itself, modify ``self.value``
+            directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
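+    # Sketch (values hypothetical): with ``target.value == [0, 1, 2]`` and
+    # offsets ``self.value == [2, 3]``, ``index[0] = [9, 9]`` rewrites
+    # ``target.value[0:2]``; the offsets themselves are left untouched.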
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+
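+    # Sketch (hypothetical region): without an index, ``region[0]`` returns the
+    # referenced row of ``region.table`` as a one-row DataFrame, and
+    # ``region[0:2]`` returns a list of one-row DataFrames.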
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
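+    # Usage sketch (column names hypothetical):
+    #
+    #     table["spike_times"]     # the column itself (a VectorData)
+    #     table[0]                 # a one-row pd.DataFrame
+    #     table[0, "spike_times"]  # a single cell value
+    #     table[0:2, 0:2]          # DataFrame of the first two rows and columns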
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
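+    # Sketch (column name hypothetical): after construction,
+    # ``table.my_col = [1, 2, 3]`` appends "my_col" to ``colnames`` and stores
+    # the value as an extra column alongside the declared fields.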
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict is insertion-ordered (guaranteed after Python 3.6), so we
+        can use it, minus anything in :attr:`.NON_COLUMN_FIELDS`, to determine
+        the column order implied by argument order.
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+                        raise ValueError(
+                            f"field {key} cannot be cast to VectorData from {val}"
+                        ) from e
+ return model
+
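+    # Sketch (names hypothetical): ``DynamicTable(name="t", description="",
+    # my_col=[1, 2, 3])`` arrives here with a plain list, which is wrapped as
+    # ``VectorData(name="my_col", description="", value=[1, 2, 3])``.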
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
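+    # Sketch (names hypothetical): passing ``spikes=...`` together with
+    # ``spikes_index=...`` links the pair here, so ``table["spikes"][0]``
+    # resolves through the index to the first ragged row.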
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
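+    # Sketch (names hypothetical): a declared column supplied as a bare array,
+    # e.g. ``MyTable(declared_col=np.arange(3), ...)``, fails plain validation,
+    # is rewrapped as ``annotation(val, name=..., description=...)``, and is
+    # then validated again.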
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it is not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+        Mimic hdmf's ``AlignedDynamicTable.__getitem__``:
+
+        https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+        """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+                f"Don't know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
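+    # Sketch (category names hypothetical): with categories ["ecephys", "ophys"],
+    # ``aligned[0]`` returns a one-row DataFrame whose columns carry a
+    # (category, column) MultiIndex, with the shared id as the row index.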
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict is insertion-ordered (guaranteed after Python 3.6), so we
+        can use it, minus anything in :attr:`.NON_CATEGORY_FIELDS`, to determine
+        the category order implied by argument order.
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -87,7 +823,7 @@ class Index(Data):
)
-class VectorData(Data):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -97,12 +833,10 @@ class VectorData(Data):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
-class VectorIndex(Index):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -131,7 +865,7 @@ class ElementIdentifiers(Data):
)
-class DynamicTableRegion(VectorData):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -141,11 +875,11 @@ class DynamicTableRegion(VectorData):
)
name: str = Field(...)
- table: Optional[DynamicTable] = Field(
- None, description="""Reference to the DynamicTable object that this region applies to."""
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
@@ -161,7 +895,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
@@ -171,14 +905,12 @@ class DynamicTable(Container):
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
index 703fefe..d0a1b95 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
index 32401a4..f0d3be3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
@@ -7,6 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.1.2"
@@ -26,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -44,6 +54,7 @@ class LinkMLMeta(RootModel):
return key in self.root
+NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -68,7 +79,13 @@ class CSRMatrix(ConfiguredBaseModel):
)
name: str = Field(...)
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""")
+ shape: NDArray[Shape["2 null"], int] = Field(
+ ...,
+ description="""the shape of this sparse matrix""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
+ },
+ )
indices: CSRMatrixIndices = Field(..., description="""column indices""")
indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
data: CSRMatrixData = Field(..., description="""values in the matrix""")
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
index 0b75bec..17128ad 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
@@ -4,10 +4,34 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
metamodel_version = "None"
version = "1.1.2"
@@ -27,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -46,6 +79,709 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+            Even though we correct the indexing logic from HDMF, so that the
+            *data* is what the API provides when one accesses ``table.data``
+            (rather than ``table.data_index``, as hdmf does), we still set
+            values on the target here (rather than on the index) to be
+            consistent. To modify the index itself, modify ``self.value``
+            directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict is insertion-ordered (guaranteed after Python 3.6), so we
+        can use it, minus anything in :attr:`.NON_COLUMN_FIELDS`, to determine
+        the column order implied by argument order.
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+                        raise ValueError(
+                            f"field {key} cannot be cast to VectorData from {val}"
+                        ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it is not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+        Mimic hdmf's ``AlignedDynamicTable.__getitem__``:
+
+        https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+        """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+                f"Don't know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict is insertion-ordered (guaranteed after Python 3.6), so we
+        can use it, minus anything in :attr:`.NON_CATEGORY_FIELDS`, to determine
+        the category order implied by argument order.
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -87,7 +823,7 @@ class Index(Data):
)
-class VectorData(Data):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -97,12 +833,10 @@ class VectorData(Data):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
-class VectorIndex(Index):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -131,7 +865,7 @@ class ElementIdentifiers(Data):
)
-class DynamicTableRegion(VectorData):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -141,11 +875,11 @@ class DynamicTableRegion(VectorData):
)
name: str = Field(...)
- table: Optional[DynamicTable] = Field(
- None, description="""Reference to the DynamicTable object that this region applies to."""
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
@@ -161,7 +895,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
@@ -171,14 +905,12 @@ class DynamicTable(Container):
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
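
The v1.1.2 table hunk above tightens the generated models: `table` and `description` on `DynamicTableRegion` become required, and `colnames` becomes a real `List[str]`. A minimal sketch of what now fails fast at validation time; the import path is assumed from the file layout elsewhere in this diff:

```python
# Hedged sketch, not part of the diff: the module path is assumed from context.
from pydantic import ValidationError

from nwb_linkml.models.pydantic.hdmf_common.v1_1_2.hdmf_common_table import (
    DynamicTableRegion,
)

try:
    DynamicTableRegion(name="region")  # omits the now-required table/description
except ValidationError as e:
    print(e.error_count())  # 2: one error per missing field
```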
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
index a9507d4..13ff59e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
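
Every generated namespace gains the same `__getitem__` shim on `ConfiguredBaseModel`, so any model that wraps an array in a `value` or `data` field can be indexed directly. A toy illustration of the behavior; the `Wrapped` class is hypothetical, not generated:

```python
from typing import List, Optional

class Wrapped(ConfiguredBaseModel):
    value: Optional[List[int]] = None

w = Wrapped(value=[1, 2, 3])
assert w[0] == 1        # forwarded to w.value[0]
assert w[1:] == [2, 3]  # slices are forwarded the same way
# Wrapped()[0] raises KeyError: neither value nor data is set
```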
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
index 872d645..7e9fa34 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
@@ -7,6 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "1.1.3"
@@ -26,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -44,6 +54,7 @@ class LinkMLMeta(RootModel):
return key in self.root
+NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -68,7 +79,13 @@ class CSRMatrix(ConfiguredBaseModel):
)
name: str = Field(...)
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""")
+ shape: NDArray[Shape["2 null"], int] = Field(
+ ...,
+ description="""the shape of this sparse matrix""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
+ },
+ )
indices: CSRMatrixIndices = Field(..., description="""column indices""")
indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
data: CSRMatrixData = Field(..., description="""values in the matrix""")
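
`shape` goes from a plainly wrong `Optional[int]` to a required length-2 integer array, with numpydantic enforcing the cardinality. A hedged sketch, assuming the subsidiary classes carry their usual defaulted names (as in the v1.2.0 copy of this module further down):

```python
import numpy as np

m = CSRMatrix(
    name="matrix",
    shape=np.array([3, 4]),  # exactly two entries: (n_rows, n_cols)
    indices=CSRMatrixIndices(),
    indptr=CSRMatrixIndptr(),
    data=CSRMatrixData(),
)
# shape=np.array([3, 4, 5]) would fail: exact_cardinality is 2
```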
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
index ae84bd1..8b25e7d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
@@ -4,10 +4,34 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
metamodel_version = "None"
version = "1.1.3"
@@ -27,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -46,6 +79,709 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+                            "Can only assign an equal-length iterable to a slice; manually index"
+                            " the ragged values of the target VectorData object if you need more"
+                            " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
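
The `_slice` helper above encodes the hdmf convention that a `VectorIndex` stores cumulative end positions: row `i` of the ragged column spans `target.value[value[i-1]:value[i]]`, with an implicit 0 for the first row. A worked sketch of standalone use (values illustrative):

```python
import numpy as np

data = VectorData(
    name="spike_times", description="ragged column",
    value=np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]),
)
index = VectorIndex(
    name="spike_times_index", description="",
    value=np.array([2, 3, 6]), target=data,
)

index[0]    # data.value[0:2] -> array([0.1, 0.2])
index[1]    # data.value[2:3] -> array([0.3])
index[1:3]  # [array([0.3]), array([0.4, 0.5, 0.6])]
```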
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
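
A hedged sketch of the region behavior above, assuming `other_table` is an already-built generated `DynamicTable` with at least three rows:

```python
import numpy as np

region = DynamicTableRegion(
    name="electrode_ref",
    description="rows of another table",
    table=other_table,
    value=np.array([0, 2]),
)
region[0]    # other_table[0]  -> a one-row pandas DataFrame
region[0:2]  # [other_table[0], other_table[2]] -> a list of DataFrames
```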
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # we get a recursion error if we setattr without first adding the key
+        # to extras when it needs to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (Python 3.7+), so we use that,
+        minus anything in :attr:`.NON_COLUMN_FIELDS`, to infer the column order
+        implied by the order in which arguments were passed
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+                # should pass if we're supposed to be a VectorData column
+                # don't want to override the intention here by insisting that it is
+                # *actually* a VectorData column, in case a bare NDArray was specified
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
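
Taken together, the validators above mean a generated `DynamicTable` can be built from bare arrays: `colnames` is inferred from argument order, `id` is created when missing, and plain arrays are cast to `VectorData`. A hedged sketch with hypothetical column names:

```python
import numpy as np

table = DynamicTable(
    name="trials",
    description="a small example table",
    start=np.array([0.0, 1.0, 2.0]),  # cast to VectorData by cast_extra_columns
    stop=np.array([1.0, 2.0, 3.0]),
)
table.colnames    # ["start", "stop"], from argument order
table["stop"]     # the stop column
table[0]          # row 0 as a one-row pandas DataFrame
table[0, "stop"]  # a single cell -> 1.0
table[:]          # the whole table as a DataFrame
```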
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    A great deal of code is duplicated here because we need to avoid diamond
+    inheritance, and it is not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+        Mimic hdmf:
+        https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+        Args:
+            item: a row index or slice, a category name, or a
+                ``(rows, category)`` tuple
+
+        Returns:
+            pd.DataFrame
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+                f"Don't know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (Python 3.7+), so we use that,
+        minus anything in :attr:`.NON_CATEGORY_FIELDS`, to infer the category order
+        implied by the order in which arguments were passed
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
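
`AlignedDynamicTableMixin` stitches its category sub-tables into one DataFrame whose columns are a `(category, column)` MultiIndex. A hedged sketch, instantiating the mixin directly since this namespace version has no generated `AlignedDynamicTable` class; `spike_table` and `wave_table` are hypothetical `DynamicTable`s of equal length:

```python
aligned = AlignedDynamicTableMixin(
    name="units",
    spikes=spike_table,
    waveforms=wave_table,
)
aligned["spikes"]        # one whole sub-table as a DataFrame
aligned[0, "waveforms"]  # row 0 of a single category
aligned[0:2]             # rows 0-1 across categories, MultiIndexed columns
```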
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -87,7 +823,7 @@ class Index(Data):
)
-class VectorData(Data):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
"""
@@ -97,10 +833,8 @@ class VectorData(Data):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -110,7 +844,7 @@ class VectorData(Data):
] = Field(None)
-class VectorIndex(Index):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, forming a map between the rows of a DynamicTable and the indices of the VectorData.
"""
@@ -123,7 +857,7 @@ class VectorIndex(Index):
target: Optional[VectorData] = Field(
None, description="""Reference to the target dataset that this index applies to."""
)
- array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(
+ value: Optional[NDArray[Shape["* num_rows"], Any]] = Field(
None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}
)
@@ -142,7 +876,7 @@ class ElementIdentifiers(Data):
)
-class DynamicTableRegion(VectorData):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -152,13 +886,13 @@ class DynamicTableRegion(VectorData):
)
name: str = Field(...)
- table: Optional[DynamicTable] = Field(
- None, description="""Reference to the DynamicTable object that this region applies to."""
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -180,7 +914,7 @@ class Container(ConfiguredBaseModel):
name: str = Field(...)
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
"""
@@ -190,14 +924,12 @@ class DynamicTable(Container):
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
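
The `cast_specified_columns` wrap validator extends the same convenience to columns typed in the class specification itself: a bare array supplied for a field annotated as `VectorData` is wrapped with the field's name and description before validation is retried. A hedged sketch of the intent, using the generated `id` field:

```python
import numpy as np

tbl = DynamicTable(
    name="t",
    description="",
    id=np.arange(3),  # bare array; wrapped by cast_specified_columns
    col=np.array([10, 20, 30]),
)
isinstance(tbl.id, VectorData)  # True
```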
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
index e8dac61..284e138 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
new file mode 100644
index 0000000..0880d00
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
@@ -0,0 +1,97 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
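
The trailing `model_rebuild()` calls pair with `from __future__ import annotations`: every annotation is stored as a string, and rebuilding once all classes exist resolves those forward references eagerly rather than at first validation. A generic toy illustration, not code from this module:

```python
from __future__ import annotations
from typing import Optional
from pydantic import BaseModel

class Parent(BaseModel):
    child: Optional[Child] = None  # "Child" is only a string here

class Child(BaseModel):
    x: int = 0

Parent.model_rebuild()    # resolve the forward reference now
Parent(child=Child(x=1))  # validates as expected
```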
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
new file mode 100644
index 0000000..0029140
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
@@ -0,0 +1,142 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(ConfiguredBaseModel):
+ """
+ a compressed sparse row matrix
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: NDArray[Shape["2 null"], int] = Field(
+ ...,
+ description="""the shape of this sparse matrix""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
+ },
+ )
+ indices: CSRMatrixIndices = Field(..., description="""column indices""")
+ indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
+ data: CSRMatrixData = Field(..., description="""values in the matrix""")
+
+
+class CSRMatrixIndices(ConfiguredBaseModel):
+ """
+ column indices
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["indices"] = Field(
+ "indices",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"}
+ },
+ )
+
+
+class CSRMatrixIndptr(ConfiguredBaseModel):
+ """
+ index pointer
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["indptr"] = Field(
+ "indptr",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"}
+ },
+ )
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ values in the matrix
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixIndices.model_rebuild()
+CSRMatrixIndptr.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
new file mode 100644
index 0000000..561d242
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
@@ -0,0 +1,943 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+
+metamodel_version = "None"
+version = "1.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+                            "Can only assign an equal-length iterable to a slice; manually index"
+                            " the ragged values of the target VectorData object if you need more"
+                            " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # we get a recursion error if we setattr without first adding the key
+        # to extras when it needs to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (Python 3.7+), so we use that,
+        minus anything in :attr:`.NON_COLUMN_FIELDS`, to infer the column order
+        implied by the order in which arguments were passed
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it's not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+ Args:
+ item:
+
+ Returns:
+
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
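+
+    # Sketch of the result shape (category names hypothetical): for
+    # categories ["electrodes", "waveforms"], table[0:2] returns a DataFrame
+    # whose columns form a MultiIndex of (category_name, column_name) pairs,
+    # indexed by the shared id column.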
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use that order, minus anything in :attr:`.NON_CATEGORY_FIELDS`,
+        to infer the category order implied by the order arguments were passed.
+        """
+        if not isinstance(model, dict):
+            return model
+        if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VocabData(VectorData):
+ """
+ Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ vocabulary: NDArray[Shape["* null"], str] = Field(
+ ...,
+ description="""The available items in the controlled vocabulary.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+VocabData.model_rebuild()
+DynamicTable.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py
new file mode 100644
index 0000000..7314aa8
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_2_0.hdmf_common_sparse import (
+ CSRMatrix,
+ CSRMatrixIndices,
+ CSRMatrixIndptr,
+ CSRMatrixData,
+)
+from ...hdmf_common.v1_2_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ VocabData,
+ DynamicTable,
+)
+from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container
+
+metamodel_version = "None"
+version = "1.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
new file mode 100644
index 0000000..60eea59
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.2.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+    A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
new file mode 100644
index 0000000..ebdac35
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_2_1.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.2.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ a compressed sparse row matrix
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: NDArray[Shape["2 null"], int] = Field(
+ ...,
+ description="""the shape of this sparse matrix""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}}
+ },
+ )
+ indices: CSRMatrixIndices = Field(..., description="""column indices""")
+ indptr: CSRMatrixIndptr = Field(..., description="""index pointer""")
+ data: CSRMatrixData = Field(..., description="""values in the matrix""")
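+
+    # Sketch of the CSR layout this class models: for the 2x3 matrix
+    #   [[1, 0, 2],
+    #    [0, 3, 0]]
+    # data = [1, 2, 3], indices = [0, 2, 1], indptr = [0, 2, 3]; row i's
+    # values are data[indptr[i]:indptr[i+1]], in columns
+    # indices[indptr[i]:indptr[i+1]].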
+
+
+class CSRMatrixIndices(ConfiguredBaseModel):
+ """
+ column indices
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["indices"] = Field(
+ "indices",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"}
+ },
+ )
+
+
+class CSRMatrixIndptr(ConfiguredBaseModel):
+ """
+ index pointer
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["indptr"] = Field(
+ "indptr",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"}
+ },
+ )
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ values in the matrix
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixIndices.model_rebuild()
+CSRMatrixIndptr.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
new file mode 100644
index 0000000..9ff2a6c
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
@@ -0,0 +1,943 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+
+metamodel_version = "None"
+version = "1.2.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
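+        # Sketch: for index values [2, 5, 9], _slice(0) == slice(0, 2),
+        # _slice(1) == slice(2, 5), _slice(2) == slice(5, 9): table row i
+        # maps to target.value[value[i-1]:value[i]].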
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+            Even though we correct HDMF's indexing logic so that accessing
+            ``table.data`` yields the data itself (rather than requiring
+            ``table.data_index`` as hdmf does), we still set values on the
+            target here (rather than on the index) for consistency.
+            To modify the index itself, modify ``self.value`` directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " ragged values of of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
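+
+
+# Usage sketch for a ragged pair (values hypothetical):
+#   data = VectorData(name="spikes", description="", value=np.arange(9))
+#   idx = VectorIndex(name="spikes_index", description="", value=[2, 5, 9], target=data)
+# idx[1] then returns data.value[2:5], the ragged cell for table row 1.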
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+          gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
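+
+    # Usage sketch (column names hypothetical):
+    #   table["start_time"]     # -> the column object itself
+    #   table[0]                # -> one-row pd.DataFrame
+    #   table[0, "start_time"]  # -> scalar cell value
+    #   table[0:2]              # -> two-row pd.DataFrame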
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
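+
+    # Sketch (column name hypothetical): after construction,
+    #   table.extra_col = VectorData(name="extra_col", description="", value=[1, 2])
+    # appends "extra_col" to colnames and stores the column in the model extras.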
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use that order, minus anything in :attr:`.NON_COLUMN_FIELDS`,
+        to infer the column order implied by the order arguments were passed.
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
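+
+    # Sketch (column names hypothetical): plain iterables are wrapped, so
+    #   DynamicTable(..., rate=[1.0, 2.0], rate_index=[1, 2])
+    # arrives as VectorData(name="rate", ...) and VectorIndex(name="rate_index", ...)
+    # before resolve_targets() links the pair.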
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it's not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+ Args:
+ item:
+
+ Returns:
+
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use that order, minus anything in :attr:`.NON_CATEGORY_FIELDS`,
+        to infer the category order implied by the order arguments were passed.
+        """
+        if not isinstance(model, dict):
+            return model
+        if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VocabData(VectorData):
+ """
+ Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ vocabulary: NDArray[Shape["* null"], str] = Field(
+ ...,
+ description="""The available items in the controlled vocabulary.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+VocabData.model_rebuild()
+DynamicTable.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py
new file mode 100644
index 0000000..7691d2a
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_2_1.hdmf_common_sparse import (
+ CSRMatrix,
+ CSRMatrixIndices,
+ CSRMatrixIndptr,
+ CSRMatrixData,
+)
+from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_2_1.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ VocabData,
+ DynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.2.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
new file mode 100644
index 0000000..1752575
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+ A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
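
As a quick orientation to the base classes above: `Data` and `Container` only carry a required `name`, while `SimpleMultiContainer` wraps a list of `Container`s in `value`, which the shared `ConfiguredBaseModel.__getitem__` then exposes for indexing. A hedged usage sketch, assuming the modules added in this diff import from the package layout it creates:

```python
# assumes the module added in this diff is importable as laid out here
from nwb_linkml.models.pydantic.hdmf_common.v1_3_0.hdmf_common_base import (
    Container,
    SimpleMultiContainer,
)

multi = SimpleMultiContainer(
    name="bucket",
    value=[Container(name="a"), Container(name="b")],
)

# ConfiguredBaseModel.__getitem__ forwards integer/slice access to .value
assert multi[0].name == "a"
assert multi[1].name == "b"
```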
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
new file mode 100644
index 0000000..9331ccc
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
@@ -0,0 +1,208 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.resources/",
+ "id": "hdmf-common.resources",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.resources",
+ }
+)
+
+
+class ExternalResources(Container):
+ """
+ A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.resources", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ keys: ExternalResourcesKeys = Field(
+ ...,
+ description="""A table for storing user terms that are used to refer to external resources.""",
+ )
+ resources: ExternalResourcesResources = Field(
+ ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+ )
+ objects: ExternalResourcesObjects = Field(
+ ...,
+ description="""A table for identifying which objects in a file contain references to external resources.""",
+ )
+ object_keys: ExternalResourcesObjectKeys = Field(
+ ..., description="""A table for identifying which objects use which keys."""
+ )
+
+
+class ExternalResourcesKeys(Data):
+ """
+ A table for storing user terms that are used to refer to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
+
+ name: Literal["keys"] = Field(
+ "keys",
+ json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
+ )
+ key_name: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The user term that maps to one or more resources in the 'resources' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesResources(Data):
+ """
+ A table for mapping user terms (i.e., keys) to resource entities.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
+
+ name: Literal["resources"] = Field(
+ "resources",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
+ },
+ )
+ keytable_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the key in the 'keys' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resource_name: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The name of the online resource (e.g., website, database) that has the entity.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resource_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The unique identifier for the resource entity at the resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the resource entity this reference applies to. This can be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjects(Data):
+ """
+ A table for identifying which objects in a file contain references to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
+
+ name: Literal["objects"] = Field(
+ "objects",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
+ },
+ )
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The UUID for the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ field: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjectKeys(Data):
+ """
+ A table for identifying which objects use which keys.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"})
+
+ name: Literal["object_keys"] = Field(
+ "object_keys",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
+ },
+ )
+ objecttable_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'objects' table for the object that holds the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keytable_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'keys' table for the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+ExternalResources.model_rebuild()
+ExternalResourcesKeys.model_rebuild()
+ExternalResourcesResources.model_rebuild()
+ExternalResourcesObjects.model_rebuild()
+ExternalResourcesObjectKeys.model_rebuild()
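
The four tables above are linked purely by integer positions: `object_keys` maps rows of `objects` to rows of `keys`, and `resources` points back at `keys` via `keytable_idx`. A hedged sketch of chasing one reference through the tables (the values and the taxonomy entry are illustrative, and the import path is assumed from this diff's layout):

```python
import numpy as np

# assumes the module added in this diff is importable as laid out here
from nwb_linkml.models.pydantic.hdmf_common.v1_3_0.hdmf_common_resources import (
    ExternalResources,
    ExternalResourcesKeys,
    ExternalResourcesObjectKeys,
    ExternalResourcesObjects,
    ExternalResourcesResources,
)

er = ExternalResources(
    name="external_resources",
    keys=ExternalResourcesKeys(key_name=np.array(["house mouse"])),
    resources=ExternalResourcesResources(
        keytable_idx=np.array([0]),
        resource_name=np.array(["NCBI Taxonomy"]),
        resource_id=np.array(["NCBI:txid10090"]),
        uri=np.array(["https://example.org/taxonomy/10090"]),
    ),
    objects=ExternalResourcesObjects(
        object_id=np.array(["00000000-0000-0000-0000-000000000000"]),
        field=np.array([""]),
    ),
    object_keys=ExternalResourcesObjectKeys(
        objecttable_idx=np.array([0]), keytable_idx=np.array([0])
    ),
)

# object -> key -> resource, all by integer index
key_row = int(er.object_keys.keytable_idx[0])
resource_row = int(np.where(er.resources.keytable_idx == key_row)[0][0])
print(er.keys.key_name[key_row], "->", er.resources.uri[resource_row])
```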
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
new file mode 100644
index 0000000..e01d80e
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_3_0.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ )
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
+ ...,
+ description="""The column indices.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
+ },
+ )
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
+ ...,
+ description="""The row index pointer.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
+ },
+ )
+ data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ The non-zero values in the matrix.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
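
The CSR layout described in the `CSRMatrix` docstring is easiest to see with a tiny worked example in plain numpy (no generated classes involved); `indptr` has one entry per row plus one, which is why its dimension alias is `number_of_rows_in_the_matrix_1`:

```python
import numpy as np

# Dense matrix being stored sparsely:
#   [[10,  0,  0],
#    [ 0, 20, 30],
#    [ 0,  0,  0]]
data = np.array([10, 20, 30])    # non-zero values, in row-major order
indices = np.array([0, 1, 2])    # column index of each non-zero value
indptr = np.array([0, 1, 3, 3])  # row i spans data[indptr[i]:indptr[i + 1]]

for i in range(len(indptr) - 1):
    sl = slice(indptr[i], indptr[i + 1])
    print(f"row {i}: columns {indices[sl].tolist()} -> values {data[sl].tolist()}")
# row 0: columns [0] -> values [10]
# row 1: columns [1, 2] -> values [20, 30]
# row 2: columns [] -> values []
```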
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
new file mode 100644
index 0000000..1ecc2ec
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
@@ -0,0 +1,943 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+
+metamodel_version = "None"
+version = "1.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+        Set a value on the :attr:`.target`.
+
+ .. note::
+
+            Even though we correct HDMF's indexing logic so that the _data_ is
+            what the API provides when one accesses table.data (rather than
+            table.data_index, as hdmf does), we still set values on the target
+            here (rather than on the index) to stay consistent with hdmf.
+            To modify the index itself, modify `self.value` directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " ragged values of of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+ # do this by the iterability of the rows index not the value because
+ # we want all lengths from this method to be equal, and if the rows are
+ # scalar, that means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so
+        column order is implied by the order in which arguments were passed, minus
+        anything in :attr:`.NON_COLUMN_FIELDS`
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+                        # pydantic v2's ValidationError can't be constructed from
+                        # a plain message, so re-raise as a ValueError instead
+                        raise ValueError(
+                            f"field {key} cannot be cast to VectorData from {val}"
+                        ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it is not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+        Mimic hdmf:
+
+        https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+        Args:
+            item: category name, row index/slice, or a ``(rows, category)`` tuple
+
+        Returns:
+            pd.DataFrame
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so
+        category order is implied by the order in which arguments were passed, minus
+        anything in :attr:`.NON_CATEGORY_FIELDS`
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VocabData(VectorData):
+ """
+ Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ vocabulary: NDArray[Shape["* null"], str] = Field(
+ ...,
+ description="""The available items in the controlled vocabulary.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+VocabData.model_rebuild()
+DynamicTable.model_rebuild()
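
Pulling the table machinery above together: the `mode="before"` validators let a `DynamicTable` be built from plain arrays, generating `id` and `colnames` when omitted, casting unrecognized columns to `VectorData`, and pairing any `*_index` column with its target to form a ragged column. A hedged end-to-end sketch (hypothetical column names; assumes the import path this diff creates):

```python
import numpy as np

# assumes the module added in this diff is importable as laid out here
from nwb_linkml.models.pydantic.hdmf_common.v1_3_0.hdmf_common_table import DynamicTable

table = DynamicTable(
    name="trials",
    description="a hypothetical trials table",
    # plain array -> cast to VectorData by cast_extra_columns
    condition=np.array(["a", "b", "a"]),
    # ragged column: flat values plus end boundaries in spike_times_index
    spike_times=np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]),
    spike_times_index=np.array([2, 3, 6]),
)

assert len(table) == 3                        # id was generated as arange(3)
assert table.colnames == ["condition", "spike_times"]
table["condition"]                            # column access -> VectorData
table[0]                                      # row access -> one-row DataFrame
assert list(table[0, "spike_times"]) == [0.1, 0.2]  # ragged cell via VectorIndex
```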
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py
new file mode 100644
index 0000000..ef79cd9
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_3_0.hdmf_common_resources import (
+ ExternalResources,
+ ExternalResourcesKeys,
+ ExternalResourcesResources,
+ ExternalResourcesObjects,
+ ExternalResourcesObjectKeys,
+)
+from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_3_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_3_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ VocabData,
+ DynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.resources",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
new file mode 100644
index 0000000..9d878a5
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+ A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py
new file mode 100644
index 0000000..00a62c7
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_4_0.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ )
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
+ ...,
+ description="""The column indices.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
+ },
+ )
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
+ ...,
+ description="""The row index pointer.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
+ },
+ )
+ data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ The non-zero values in the matrix.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
new file mode 100644
index 0000000..d37f163
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
@@ -0,0 +1,916 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from numpydantic import NDArray, Shape
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+
+metamodel_version = "None"
+version = "1.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
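+
+    # For example (editor's note): if the index holds [3, 5, 6], then
+    # _slice(0) == slice(0, 3), _slice(1) == slice(3, 5), and _slice(2) == slice(5, 6),
+    # so each table row maps to one contiguous run of the flat target VectorData.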
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
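+
+    # Access patterns, assuming a table with columns "a" and "b" (editor's sketch):
+    #
+    #     table["a"]     # the "a" column object itself
+    #     table[0]       # row 0 as a one-row pandas DataFrame
+    #     table[0, "a"]  # the scalar cell at row 0, column "a"
+    #     table[0:2, :]  # rows 0-1 across all columns, as a DataFrame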
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+            # scalars need to be wrapped in a list for pandas.
+            # Decide based on the iterability of the rows index, not the value,
+            # because every column returned by this method must have equal length,
+            # and a scalar rows index means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # setattr would recurse if the key weren't registered in __pydantic_extra__
+        # first, so add it there before delegating to the superclass
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        Dicts preserve insertion order (guaranteed since Python 3.7), so column
+        order is implied by argument order, minus anything in :attr:`.NON_COLUMN_FIELDS`
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+                # should pass if we're supposed to be a VectorData column;
+                # don't override intention here by insisting that it is
+                # *actually* a VectorData column, in case an NDArray was specified
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+ A great deal of code duplication because we need to avoid diamond inheritance
+ and also it's not so easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
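+
+    # Editor's sketch of the result: `aligned[0:2]` yields a DataFrame with
+    # hierarchical columns keyed (table_name, column_name); (self.name, "id")
+    # becomes the index and each category contributes its own column group,
+    # mirroring hdmf's AlignedDynamicTable.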
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        Dicts preserve insertion order (guaranteed since Python 3.7), so category
+        order is implied by argument order, minus anything in :attr:`.NON_CATEGORY_FIELDS`
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+DynamicTable.model_rebuild()
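+
+# End-to-end sketch (editor's illustration; names are hypothetical): a ragged
+# "spikes" column paired with an implicit "spikes_index" column, so each table
+# row owns a variable-length slice of the flat data:
+#
+#     table = DynamicTable(
+#         name="units",
+#         description="one row per unit",
+#         spikes=VectorData(name="spikes", description="", value=np.array([0.1, 0.2, 0.9])),
+#         spikes_index=VectorIndex(name="spikes_index", description="", value=np.array([2, 3])),
+#     )
+#     table[0, "spikes"]  # -> [0.1, 0.2], i.e. the target's slice [0:2]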
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py
new file mode 100644
index 0000000..43432b8
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_4_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
index 078665b..5545403 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -91,7 +100,7 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
- children: Optional[List[Container]] = Field(
+ value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
index e520f5f..f07bed2 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,17 +80,17 @@ class CSRMatrix(Container):
)
name: str = Field(...)
- shape: Optional[np.uint64] = Field(
- None, description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
- indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field(
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
- indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field(
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
index 9e2b445..c53154c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py
@@ -4,10 +4,34 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
from numpydantic import NDArray, Shape
metamodel_version = "None"
@@ -28,6 +52,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -47,6 +80,709 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
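+
+    # Editor's note: when a target is set, `index[i] = v` writes into the target's
+    # ragged slot (target.value[_slice(i)]); without a target it mutates the raw
+    # index array itself via `self.value[key] = v`.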
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
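+
+    # Editor's sketch: a DynamicTableRegion holds row indices into another table;
+    # e.g. with value == [0, 0, 1], region[2] resolves row 1 of the referenced
+    # DynamicTable and returns it as a one-row DataFrame.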
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+            # scalars need to be wrapped in a list for pandas.
+            # Decide based on the iterability of the rows index, not the value,
+            # because every column returned by this method must have equal length,
+            # and a scalar rows index means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # setattr would recurse if the key weren't registered in __pydantic_extra__
+        # first, so add it there before delegating to the superclass
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        Dicts preserve insertion order (guaranteed since Python 3.7), so column
+        order is implied by argument order, minus anything in :attr:`.NON_COLUMN_FIELDS`
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+                # should pass if we're supposed to be a VectorData column;
+                # don't override intention here by insisting that it is
+                # *actually* a VectorData column, in case an NDArray was specified
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
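+
+    # Editor's sketch: this wrap validator lets callers pass plain arrays for
+    # columns declared in a generated subclass, e.g. a (hypothetical) table with a
+    # spec'd VectorData column `x` accepts `x=np.array([1.0, 2.0])` and retries
+    # validation as `x=VectorData(value=..., name="x", description=...)`.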
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+ A great deal of code duplication because we need to avoid diamond inheritance
+ and also it's not so easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        Dicts preserve insertion order (guaranteed since Python 3.7), so category
+        order is implied by argument order, minus anything in :attr:`.NON_CATEGORY_FIELDS`
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -61,7 +797,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(Data):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -71,10 +807,8 @@ class VectorData(Data):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -84,7 +818,7 @@ class VectorData(Data):
] = Field(None)
-class VectorIndex(VectorData):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -97,10 +831,8 @@ class VectorIndex(VectorData):
target: Optional[VectorData] = Field(
None, description="""Reference to the target dataset that this index applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -124,7 +856,7 @@ class ElementIdentifiers(Data):
)
-class DynamicTableRegion(VectorData):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -134,13 +866,13 @@ class DynamicTableRegion(VectorData):
)
name: str = Field(...)
- table: Optional[DynamicTable] = Field(
- None, description="""Reference to the DynamicTable object that this region applies to."""
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -150,7 +882,7 @@ class DynamicTableRegion(VectorData):
] = Field(None)
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -160,14 +892,12 @@ class DynamicTable(Container):
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -177,7 +907,7 @@ class DynamicTable(Container):
)
-class AlignedDynamicTable(DynamicTable):
+class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
@@ -186,18 +916,16 @@ class AlignedDynamicTable(DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py
index b9cf9ac..6d03d3d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py
@@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py
new file mode 100644
index 0000000..737fa66
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.5.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+ A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
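+# A minimal, illustrative sketch (not part of the generated schema) of the
+# ``__getitem__`` forwarding defined on ConfiguredBaseModel above: indexing a
+# model that has a non-None ``value`` (or ``data``) field indexes into that field.
+#
+#     a = Container(name="a")
+#     multi = SimpleMultiContainer(name="multi", value=[a])
+#     multi[0]           # -> a, resolved via the ``value`` field
+#     Data(name="d")[0]  # -> KeyError: no value or data field to index from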
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py
new file mode 100644
index 0000000..41d0d3a
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_5_1.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.5.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ )
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
+ ...,
+ description="""The column indices.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
+ },
+ )
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
+ ...,
+ description="""The row index pointer.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
+ },
+ )
+ data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
+
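+# A worked example of the CSR layout described above (illustrative only): for
+# shape=[2, 3], indptr=[0, 2, 3], and indices=[0, 2, 1], row 0 has non-zero
+# entries in columns indices[0:2] == [0, 2] and row 1 in indices[2:3] == [1],
+# i.e. the dense non-zero pattern
+#
+#     [[x, 0, x],
+#      [0, x, 0]]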
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ The non-zero values in the matrix.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py
new file mode 100644
index 0000000..39a4eb3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py
@@ -0,0 +1,945 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.5.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
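+# A minimal usage sketch (illustrative; uses the concrete VectorData and
+# VectorIndex classes defined later in this module):
+#
+#     data = VectorData(name="spikes", description="spike times",
+#                       value=np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6]))
+#     index = VectorIndex(name="spikes_index", description="ragged index",
+#                         value=np.array([2, 6]), target=data)
+#     data._index = index
+#     data[0]    # -> array([0.1, 0.2]); slicing is delegated to the index
+#     len(data)  # -> 2, the number of ragged rows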
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct HDMF's indexing logic so that the API serves the
+            *data* when one accesses ``table.data`` (rather than ``table.data_index``,
+            as hdmf does), we still write through to the target here (rather than to
+            the index) for consistency. To modify the index itself, modify
+            ``self.value`` directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+                            "Can only assign an equal-length iterable to a slice; manually index"
+                            " the ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
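+# How ``_slice`` maps an index entry to a ragged row (illustrative): with
+# index.value == [2, 6], row 0 spans target.value[0:2] and row 1 spans
+# target.value[2:6]; each entry marks the *end* of its row, so no separate
+# (start, length) bookkeeping is needed.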
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+                # the index returns a list of index arrays, and indexing the
+                # table with each array yields a dataframe, so we end up with
+                # a list of dataframes
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+                raise ValueError(f"Don't know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
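+# A minimal usage sketch (illustrative; assumes ``other_table`` is a populated
+# DynamicTable as defined later in this module):
+#
+#     region = DynamicTableRegion(name="source", description="row refs",
+#                                 table=other_table, value=np.array([3, 0]))
+#     region[0]    # -> DataFrame holding row 3 of ``other_table``
+#     region[0:2]  # -> [row 3 df, row 0 df], one dataframe per referenced row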
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+            # scalars need to be wrapped in a list so pandas can build a column.
+            # decide based on the iterability of the rows index, not the value,
+            # because all values returned from this method must be equal length,
+            # and a scalar rows index means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+ # we get a recursion error if we setattr without having first added to
+ # extras if we need it to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so we
+        can use it, minus anything in :attr:`.NON_COLUMN_FIELDS`, to infer the order
+        in which columns were passed
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+                        # pydantic's ValidationError cannot be constructed from a bare
+                        # string, so re-raise as a ValueError instead
+                        raise ValueError(
+                            f"field {key} cannot be cast to VectorData from {val}"
+                        ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
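+# A minimal usage sketch of the table behavior (illustrative; uses the concrete
+# DynamicTable class defined below; plain arrays passed as extra columns are
+# cast to VectorData by the validators above):
+#
+#     table = DynamicTable(
+#         name="trials", description="trial metadata",
+#         start=np.array([0.0, 1.5]), stop=np.array([1.5, 3.0]),
+#     )
+#     table.colnames     # -> ['start', 'stop'], inferred from kwarg order
+#     table[0]           # -> single-row pandas DataFrame, indexed by id
+#     table[0, "start"]  # -> 0.0
+#     table["stop"]      # -> the stop column, cast to a VectorData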
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    A great deal of code is duplicated here because we need to avoid diamond
+    inheritance, and pydantic validator methods are not easily copied between classes.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+        https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+        Args:
+            item: row index/slice, category name, or a ``(rows, category)`` tuple
+
+        Returns:
+            A :class:`pandas.DataFrame` of the selected rows and/or categories
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+                f"Don't know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+        if not isinstance(model, dict):
+            return model
+        if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so we
+        can use it, minus anything in :attr:`.NON_CATEGORY_FIELDS`, to infer the order
+        in which categories were passed
+ """
+        if not isinstance(model, dict):
+            return model
+        if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
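+# An indexing sketch (illustrative; assumes ``aligned`` is an already-built
+# AlignedDynamicTable whose categories are ['trials', 'stims']):
+#
+#     aligned["trials"]      # -> the whole trials sub-table as a DataFrame
+#     aligned[0]             # -> one row across all categories, with
+#                            #    (table name, column name) MultiIndex columns
+#     aligned[0:2, "stims"]  # -> rows 0..1 of the stims sub-table only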
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
+ """
+ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ value: Optional[List[DynamicTable]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
+ )
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+DynamicTable.model_rebuild()
+AlignedDynamicTable.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py
new file mode 100644
index 0000000..1676f7c
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_5_1.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.5.1"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py
new file mode 100644
index 0000000..21354d9
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.6.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+ A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py
new file mode 100644
index 0000000..bc4a505
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_6_0.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.6.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try to get a value from ``value`` or ``data``, if present"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ )
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
+ ...,
+ description="""The column indices.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
+ },
+ )
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
+ ...,
+ description="""The row index pointer.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
+ },
+ )
+ data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ The non-zero values in the matrix.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py
new file mode 100644
index 0000000..da0bc73
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py
@@ -0,0 +1,945 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.6.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " ragged values of of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+          gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+            # decide based on the iterability of the rows index, not the value,
+            # because all lengths returned by this method must be equal; a scalar
+            # rows index means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # we get a recursion error if we setattr without first adding the key to
+        # __pydantic_extra__ when it needs to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so we
+        use that order, minus anything in :attr:`.NON_COLUMN_FIELDS`, to determine the
+        column order implied by argument order
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
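+        # e.g. a plain list passed as my_col=[1, 2, 3] (hypothetical) becomes
+        # VectorData(name="my_col", description="", value=[1, 2, 3]); a
+        # my_col_index argument would become a VectorIndex instead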
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+ # should pass if we're supposed to be a VectorData column
+ # don't want to override intention here by insisting that it is
+ # *actually* a VectorData column in case an NDArray has been specified for now
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid diamond
+    inheritance, and it is not easy to copy a pydantic validator method.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+ Args:
+ item:
+
+ Returns:
+
+ """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+ # construct below in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so we
+        use that order, minus anything in :attr:`.NON_CATEGORY_FIELDS`, to determine the
+        category order implied by argument order
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+            if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+ assert all([length == lengths[0] for length in lengths]), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+    Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
+ """
+ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ value: Optional[List[DynamicTable]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
+ )
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+DynamicTable.model_rebuild()
+AlignedDynamicTable.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py
new file mode 100644
index 0000000..68060f7
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_6_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.6.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py
new file mode 100644
index 0000000..ec81b87
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+
+metamodel_version = "None"
+version = "1.7.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.base/",
+ "id": "hdmf-common.base",
+ "imports": ["hdmf-common.nwb.language"],
+ "name": "hdmf-common.base",
+ }
+)
+
+
+class Data(ConfiguredBaseModel):
+ """
+ An abstract data type for a dataset.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class Container(ConfiguredBaseModel):
+ """
+ An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ name: str = Field(...)
+
+
+class SimpleMultiContainer(Container):
+ """
+ A simple Container for holding onto multiple containers.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.base", "tree_root": True}
+ )
+
+ value: Optional[List[Container]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
+ )
+ name: str = Field(...)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+Data.model_rebuild()
+Container.model_rebuild()
+SimpleMultiContainer.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py
new file mode 100644
index 0000000..3bfd4a8
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_7_0.hdmf_common_base import Container
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.7.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.sparse/",
+ "id": "hdmf-common.sparse",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.sparse",
+ }
+)
+
+
+class CSRMatrix(Container):
+ """
+ A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.sparse", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ )
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
+ ...,
+ description="""The column indices.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
+ },
+ )
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
+ ...,
+ description="""The row index pointer.""",
+ json_schema_extra={
+ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}}
+ },
+ )
+ data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""")
+
+
+class CSRMatrixData(ConfiguredBaseModel):
+ """
+ The non-zero values in the matrix.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+ name: Literal["data"] = Field(
+ "data",
+ json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
new file mode 100644
index 0000000..627f80d
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
@@ -0,0 +1,945 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.7.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+ Even though we correct the indexing logic from HDMF where the
+ _data_ is the thing that is provided by the API when one accesses
+ table.data (rather than table.data_index as hdmf does),
+ we will set to the target here (rather than to the index)
+ to be consistent. To modify the index, modify `self.value` directly
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+ " ragged values of of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0, 1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+          gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+ if isinstance(rows, int) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+ # scalars need to be wrapped in series for pandas
+            # decide based on the iterability of the rows index, not the value,
+            # because all lengths returned by this method must be equal; a scalar
+            # rows index means length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # we get a recursion error if we setattr without first adding the key to
+        # __pydantic_extra__ when it needs to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7), so we
+        use that order, minus anything in :attr:`.NON_COLUMN_FIELDS`, to determine the
+        column order implied by argument order
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+ except ValidationError as e: # pragma: no cover
+ raise ValidationError(
+ f"field {key} cannot be cast to VectorData from {val}"
+ ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+                # this should pass if we're supposed to be a VectorData column;
+                # for now, don't override intent by insisting that it *actually*
+                # be a VectorData column in case an NDArray has been specified
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it is not straightforward to copy a pydantic
+    validator method between classes.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+        Args:
+            item: A ``str`` (one category), an ``int``/``slice``/array
+                (rows across all categories), or a ``(rows, category)`` tuple.
+
+        Returns:
+            pd.DataFrame
+        """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+            # the df is constructed below, outside the if/else,
+            # in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
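+
+    # A minimal indexing sketch (hypothetical category names, assuming the
+    # generated ``AlignedDynamicTable`` below):
+    #
+    #     aligned["electrodes"]     # one whole category as a DataFrame
+    #     aligned[0, "electrodes"]  # row 0 of a single category
+    #     aligned[0:2]              # rows 0-1 across every category, as a
+    #                               # DataFrame with (table, column) MultiIndex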
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use its key order, minus anything in :attr:`.NON_CATEGORY_FIELDS`,
+        to determine the category order implied by the order arguments were passed.
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common.table/",
+ "id": "hdmf-common.table",
+ "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+ "name": "hdmf-common.table",
+ }
+)
+
+
+class VectorData(VectorDataMixin):
+ """
+ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class VectorIndex(VectorIndexMixin):
+ """
+ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ target: Optional[VectorData] = Field(
+ None, description="""Reference to the target dataset that this index applies to."""
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+ """
+ A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(
+ "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+ )
+
+
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
+ """
+ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
+ )
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
+ )
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+ """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
+ """
+ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-common.table", "tree_root": True}
+ )
+
+ value: Optional[List[DynamicTable]] = Field(
+ None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
+ )
+ name: str = Field(...)
+ colnames: List[str] = Field(
+ ...,
+ description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+ )
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
+ ...,
+ description="""Array of unique identifiers for the rows of this dynamic table.""",
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+ )
+ vector_data: Optional[List[VectorData]] = Field(
+ None, description="""Vector columns, including index columns, of this dynamic table."""
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+VectorData.model_rebuild()
+VectorIndex.model_rebuild()
+ElementIdentifiers.model_rebuild()
+DynamicTableRegion.model_rebuild()
+DynamicTable.model_rebuild()
+AlignedDynamicTable.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py
new file mode 100644
index 0000000..56b9f0d
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_7_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+
+metamodel_version = "None"
+version = "1.7.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-common"},
+ },
+ "default_prefix": "hdmf-common/",
+ "description": "Common data structures provided by HDMF",
+ "id": "hdmf-common",
+ "imports": [
+ "hdmf-common.base",
+ "hdmf-common.table",
+ "hdmf-common.sparse",
+ "hdmf-common.nwb.language",
+ ],
+ "name": "hdmf-common",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
index 93ec4a2..0e61a5c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -91,7 +100,7 @@ class SimpleMultiContainer(Container):
{"from_schema": "hdmf-common.base", "tree_root": True}
)
- children: Optional[List[Container]] = Field(
+ value: Optional[List[Container]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}}
)
name: str = Field(...)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
index 230460c..aef8124 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,17 +80,17 @@ class CSRMatrix(Container):
)
name: str = Field(...)
- shape: Optional[np.uint64] = Field(
- None, description="""The shape (number of rows, number of columns) of this sparse matrix."""
+ shape: List[int] = Field(
+ ..., description="""The shape (number of rows, number of columns) of this sparse matrix."""
)
- indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field(
+ indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field(
...,
description="""The column indices.""",
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}}
},
)
- indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field(
+ indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field(
...,
description="""The row index pointer.""",
json_schema_extra={
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
index 46ad6ef..5a29869 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
@@ -4,10 +4,34 @@ from decimal import Decimal
from enum import Enum
import re
import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container
+import pandas as pd
+from typing import (
+ Any,
+ ClassVar,
+ List,
+ Literal,
+ Dict,
+ Optional,
+ Union,
+ Generic,
+ Iterable,
+ Tuple,
+ TypeVar,
+ overload,
+)
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ Field,
+ RootModel,
+ field_validator,
+ model_validator,
+ ValidationInfo,
+ ValidatorFunctionWrapHandler,
+ ValidationError,
+)
+import numpy as np
from numpydantic import NDArray, Shape
metamodel_version = "None"
@@ -28,6 +52,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -47,6 +80,709 @@ class LinkMLMeta(RootModel):
NUMPYDANTIC_VERSION = "1.2.1"
+
+T = TypeVar("T", bound=NDArray)
+
+
+class VectorDataMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorData indexing abilities
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ # redefined in `VectorData`, but included here for testing and type checking
+ value: Optional[T] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ return self._index[item]
+ else:
+ return self.value[item]
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ if self._index:
+ # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+ self._index[key] = value
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use index as length, if present
+ """
+ if self._index:
+ return len(self._index)
+ else:
+ return len(self.value)
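+
+    # A minimal ragged-data sketch (hypothetical values; inside a DynamicTable
+    # the ``_index`` link is created by ``resolve_targets`` instead):
+    #
+    #     data = VectorData(name="x", description="", value=[1, 2, 3, 4])
+    #     index = VectorIndex(name="x_index", description="", value=[2, 4], target=data)
+    #     data._index = index
+    #     data[0]    # -> [1, 2], sliced through the index
+    #     len(data)  # -> 2 cells, not 4 raw elements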
+
+
+class VectorIndexMixin(BaseModel, Generic[T]):
+ """
+ Mixin class to give VectorIndex indexing abilities
+ """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+ value: Optional[T] = None
+ target: Optional["VectorData"] = None
+
+ def __init__(self, value: Optional[NDArray] = None, **kwargs):
+ if value is not None and "value" not in kwargs:
+ kwargs["value"] = value
+ super().__init__(**kwargs)
+
+ def _slice(self, arg: int) -> slice:
+ """
+ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+ """
+ start = 0 if arg == 0 else self.value[arg - 1]
+ end = self.value[arg]
+ return slice(start, end)
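+
+    # e.g., for value == [2, 5, 9] (cumulative end indices into the target):
+    #     _slice(0) -> slice(0, 2)
+    #     _slice(1) -> slice(2, 5)
+    #     _slice(2) -> slice(5, 9)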
+
+ def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+ if self.target is None:
+ return self.value[item]
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.target.value[self._slice(item)]
+ elif isinstance(item, (slice, Iterable)):
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.target.value[self._slice(i)] for i in item]
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {item}")
+
+ def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+ """
+ Set a value on the :attr:`.target` .
+
+ .. note::
+
+            Even though we correct HDMF's indexing logic so that the *data* is
+            what the API provides when one accesses ``table.data``
+            (rather than ``table.data_index``, as hdmf does),
+            we still set on the target here (rather than on the index) to be
+            consistent. To modify the index itself, modify ``self.value`` directly.
+
+ """
+ if self.target:
+ if isinstance(key, (int, np.integer)):
+ self.target.value[self._slice(key)] = value
+ elif isinstance(key, (slice, Iterable)):
+ if isinstance(key, slice):
+ key = range(*key.indices(len(self.value)))
+
+ if isinstance(value, Iterable):
+ if len(key) != len(value):
+ raise ValueError(
+ "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+ " control"
+ )
+ for i, subval in zip(key, value):
+ self.target.value[self._slice(i)] = subval
+ else:
+ for i in key:
+ self.target.value[self._slice(i)] = value
+ else: # pragma: no cover
+ raise AttributeError(f"Could not index with {key}")
+
+ else:
+ self.value[key] = value
+
+ def __getattr__(self, item: str) -> Any:
+ """
+ Forward getattr to ``value``
+ """
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self.value, item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Get length from value
+ """
+ return len(self.value)
+
+
+class DynamicTableRegionMixin(BaseModel):
+ """
+ Mixin to allow indexing references to regions of dynamictables
+ """
+
+ _index: Optional["VectorIndex"] = None
+
+ table: "DynamicTableMixin"
+ value: Optional[NDArray[Shape["*"], int]] = None
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+ def __getitem__(
+ self, item: Union[int, slice, Iterable]
+ ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
+ """
+ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
+ this being a subclass of ``VectorData``
+ """
+ if self._index:
+ if isinstance(item, (int, np.integer)):
+ # index returns an array of indices,
+ # and indexing table with an array returns a list of rows
+ return self.table[self._index[item]]
+ elif isinstance(item, slice):
+ # index returns a list of arrays of indices,
+ # so we index table with an array to construct
+ # a list of lists of rows
+ return [self.table[idx] for idx in self._index[item]]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+ else:
+ if isinstance(item, (int, np.integer)):
+ return self.table[self.value[item]]
+ elif isinstance(item, (slice, Iterable)):
+ # Return a list of dataframe rows because this is most often used
+ # as a column in a DynamicTable, so while it would normally be
+ # ideal to just return the slice as above as a single df,
+ # we need each row to be separate to fill the column
+ if isinstance(item, slice):
+ item = range(*item.indices(len(self.value)))
+ return [self.table[self.value[i]] for i in item]
+ else: # pragma: no cover
+ raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
+
+ def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+ # self.table[self.value[key]] = value
+ raise NotImplementedError(
+ "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover
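+
+    # A minimal sketch (hypothetical region): with ``value == [3, 0]`` pointing
+    # into ``table``,
+    #
+    #     region[0]    # row 3 of ``table`` as a one-row DataFrame
+    #     region[0:2]  # [table[3], table[0]], a list of one-row DataFrames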
+
+
+class DynamicTableMixin(BaseModel):
+ """
+ Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+ Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+ but simplifying along the way :)
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str, ...]] = (
+ "id",
+ "name",
+ "colnames",
+ "description",
+ )
+
+ # overridden by subclass but implemented here for testing and typechecking purposes :)
+ colnames: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+ @overload
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+ @overload
+ def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+ @overload
+ def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+ pd.DataFrame,
+ list,
+ "NDArray",
+ "VectorDataMixin",
+ ]: ...
+
+ @overload
+ def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ...
+
+ def __getitem__(
+ self,
+ item: Union[
+ str,
+ int,
+ slice,
+ "NDArray",
+ Tuple[int, Union[int, str]],
+ Tuple[Union[int, slice], ...],
+ ],
+ ) -> Any:
+ """
+ Get an item from the table
+
+ If item is...
+
+ - ``str`` : get the column with this name
+ - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+ gets the 0th row from ``colname``
+ - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame`
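+
+        A rough usage sketch (hypothetical columns ``a`` and ``b``)::
+
+            table["a"]       # the ``a`` column object
+            table[0]         # row 0 as a one-row DataFrame
+            table[0, "a"]    # a single cell value
+            table[0:2, :]    # rows 0-1 of every column as a DataFrame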
+ """
+ if isinstance(item, str):
+ return self._columns[item]
+ if isinstance(item, (int, slice, np.integer, np.ndarray)):
+ data = self._slice_range(item)
+ index = self.id[item]
+ elif isinstance(item, tuple):
+ if len(item) != 2:
+ raise ValueError(
+ "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+ f" {item}"
+ )
+
+ # all other cases are tuples of (rows, cols)
+ rows, cols = item
+ if isinstance(cols, (int, slice, np.integer)):
+ cols = self.colnames[cols]
+
+            if isinstance(rows, (int, np.integer)) and isinstance(cols, str):
+ # single scalar value
+ return self._columns[cols][rows]
+
+ data = self._slice_range(rows, cols)
+ index = self.id[rows]
+ else:
+ raise ValueError(f"Unsure how to get item with key {item}")
+
+ # cast to DF
+ if not isinstance(index, Iterable):
+ index = [index]
+ index = pd.Index(data=index)
+ return pd.DataFrame(data, index=index)
+
+ def _slice_range(
+ self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
+ ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+ if cols is None:
+ cols = self.colnames
+ elif isinstance(cols, str):
+ cols = [cols]
+ data = {}
+ for k in cols:
+ if isinstance(rows, np.ndarray):
+ # help wanted - this is probably cr*zy slow
+ val = [self._columns[k][i] for i in rows]
+ else:
+ val = self._columns[k][rows]
+
+            # scalars need to be wrapped in a list for pandas.
+            # decide based on the iterability of the rows index, not the value,
+            # because we want all lengths returned from this method to be equal,
+            # and scalar rows mean length == 1
+ if not isinstance(rows, (Iterable, slice)):
+ val = [val]
+
+ data[k] = val
+ return data
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ raise NotImplementedError("TODO") # pragma: no cover
+
+ def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+ """
+ Add a column, appending it to ``colnames``
+ """
+ # don't use this while building the model
+ if not getattr(self, "__pydantic_complete__", False): # pragma: no cover
+ return super().__setattr__(key, value)
+
+ if key not in self.model_fields_set and not key.endswith("_index"):
+ self.colnames.append(key)
+
+        # we get a recursion error if we setattr without first adding the key
+        # to __pydantic_extra__ when it needs to be there
+ if key not in self.model_fields and key not in self.__pydantic_extra__:
+ self.__pydantic_extra__[key] = value
+
+ return super().__setattr__(key, value)
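+
+    # Sketch of adding a column after construction (hypothetical data; the new
+    # column must match the table length, since validators re-run on
+    # assignment):
+    #
+    #     table.extra = VectorData(name="extra", description="", value=[1, 2, 3])
+    #     assert "extra" in table.colnames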
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:, :], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if not isinstance(model, dict):
+ return model
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_COLUMN_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_colnames(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use its key order, minus anything in :attr:`.NON_COLUMN_FIELDS`,
+        to determine the column order implied by the order arguments were passed.
+ """
+ if not isinstance(model, dict):
+ return model
+ if "colnames" not in model:
+ colnames = [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ model["colnames"] = colnames
+ else:
+ # add any columns not explicitly given an order at the end
+ colnames = model["colnames"].copy()
+ colnames.extend(
+ [
+ k
+ for k in model
+ if k not in cls.NON_COLUMN_FIELDS
+ and not k.endswith("_index")
+ and k not in model["colnames"]
+ and not isinstance(model[k], VectorIndexMixin)
+ ]
+ )
+ model["colnames"] = colnames
+ return model
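+
+    # Sketch of colname inference (hypothetical keys): given
+    #     {"name": "t", "description": "d", "a": [...], "a_index": [...], "b": [...]}
+    # ``colnames`` becomes ["a", "b"]: non-column fields and ``*_index``
+    # columns are excluded, in the order the arguments were passed.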
+
+ @model_validator(mode="before")
+ @classmethod
+ def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict:
+ """
+ If extra columns are passed as just lists or arrays, cast to VectorData
+ before we resolve targets for VectorData and VectorIndex pairs.
+
+ See :meth:`.cast_specified_columns` for handling columns in the class specification
+ """
+ # if columns are not in the specification, cast to a generic VectorData
+
+ if isinstance(model, dict):
+ for key, val in model.items():
+ if key in cls.model_fields:
+ continue
+ if not isinstance(val, (VectorData, VectorIndex)):
+ try:
+ if key.endswith("_index"):
+ model[key] = VectorIndex(name=key, description="", value=val)
+ else:
+ model[key] = VectorData(name=key, description="", value=val)
+                    except ValidationError as e:  # pragma: no cover
+                        # pydantic v2 ValidationErrors can't be constructed from
+                        # a bare string, so re-raise as a ValueError
+                        raise ValueError(
+                            f"field {key} cannot be cast to VectorData from {val}"
+                        ) from e
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._columns.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_COLUMN_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._columns.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+ "Columns are not of equal length! "
+ f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
+ )
+ return self
+
+ @field_validator("*", mode="wrap")
+ @classmethod
+ def cast_specified_columns(
+ cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
+ ) -> Any:
+ """
+ If columns *in* the model specification are supplied as arrays,
+ try casting them to the type before validating.
+
+ Columns that are not in the spec are handled separately in
+ :meth:`.cast_extra_columns`
+ """
+ try:
+ return handler(val)
+ except ValidationError as e:
+ annotation = cls.model_fields[info.field_name].annotation
+ if type(annotation).__name__ == "_UnionGenericAlias":
+ annotation = annotation.__args__[0]
+ try:
+                # this should pass if we're supposed to be a VectorData column;
+                # for now, don't override intent by insisting that it *actually*
+                # be a VectorData column in case an NDArray has been specified
+ return handler(
+ annotation(
+ val,
+ name=info.field_name,
+ description=cls.model_fields[info.field_name].description,
+ )
+ )
+ except Exception:
+ raise e from None
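+
+    # Sketch of the retry above (hypothetical): ``id`` is annotated as
+    # ``VectorData[NDArray[...]]`` on the generated classes below, so a raw
+    # array is re-wrapped roughly as
+    #     handler(VectorData(np.arange(3), name="id", description=...))
+    # letting ``DynamicTable(..., id=np.arange(3))`` validate.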
+
+
+class AlignedDynamicTableMixin(BaseModel):
+ """
+ Mixin to allow indexing multiple tables that are aligned on a common ID
+
+    There is a great deal of code duplication here because we need to avoid
+    diamond inheritance, and it is not straightforward to copy a pydantic
+    validator method between classes.
+ """
+
+ model_config = ConfigDict(extra="allow", validate_assignment=True)
+ __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
+
+    NON_CATEGORY_FIELDS: ClassVar[tuple[str, ...]] = (
+ "name",
+ "categories",
+ "colnames",
+ "description",
+ )
+
+ name: str = "aligned_table"
+ categories: List[str] = Field(default_factory=list)
+ id: Optional[NDArray[Shape["* num_rows"], int]] = None
+
+ @property
+ def _categories(self) -> Dict[str, "DynamicTableMixin"]:
+        return {k: getattr(self, k) for k in self.categories}
+
+ def __getitem__(
+ self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]]
+ ) -> pd.DataFrame:
+ """
+ Mimic hdmf:
+
+ https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261
+
+        Args:
+            item: A ``str`` (one category), an ``int``/``slice``/array
+                (rows across all categories), or a ``(rows, category)`` tuple.
+
+        Returns:
+            pd.DataFrame
+        """
+ if isinstance(item, str):
+ # get a single table
+ return self._categories[item][:]
+ elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
+ # get a slice of a single table
+ return self._categories[item[1]][item[0]]
+ elif isinstance(item, (int, slice, Iterable)):
+ # get a slice of all the tables
+ ids = self.id[item]
+ if not isinstance(ids, Iterable):
+ ids = pd.Series([ids])
+ ids = pd.DataFrame({"id": ids})
+ tables = [ids]
+ for category_name, category in self._categories.items():
+ table = category[item]
+ if isinstance(table, pd.DataFrame):
+ table = table.reset_index()
+ elif isinstance(table, np.ndarray):
+ table = pd.DataFrame({category_name: [table]})
+ elif isinstance(table, Iterable):
+ table = pd.DataFrame({category_name: table})
+ else:
+ raise ValueError(
+ f"Don't know how to construct category table for {category_name}"
+ )
+ tables.append(table)
+
+ names = [self.name] + self.categories
+            # the df is constructed below, outside the if/else,
+            # in case we need to support array indexing in the future
+ else:
+ raise ValueError(
+ f"Dont know how to index with {item}, "
+ "need an int, string, slice, ndarray, or tuple[int | slice, str]"
+ )
+
+ df = pd.concat(tables, axis=1, keys=names)
+ df.set_index((self.name, "id"), drop=True, inplace=True)
+ return df
+
+ def __getattr__(self, item: str) -> Any:
+ """Try and use pandas df attrs if we don't have them"""
+ try:
+ return BaseModel.__getattr__(self, item)
+ except AttributeError as e:
+ try:
+ return getattr(self[:], item)
+ except AttributeError:
+ raise e from None
+
+ def __len__(self) -> int:
+ """
+ Use the id column to determine length.
+
+ If the id column doesn't represent length accurately, it's a bug
+ """
+ return len(self.id)
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_id(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Create ID column if not provided
+ """
+ if "id" not in model:
+ lengths = []
+ for key, val in model.items():
+ # don't get lengths of columns with an index
+ if (
+ f"{key}_index" in model
+ or (isinstance(val, VectorData) and val._index)
+ or key in cls.NON_CATEGORY_FIELDS
+ ):
+ continue
+ lengths.append(len(val))
+ model["id"] = np.arange(np.max(lengths))
+
+ return model
+
+ @model_validator(mode="before")
+ @classmethod
+ def create_categories(cls, model: Dict[str, Any]) -> Dict:
+ """
+ Construct categories from arguments.
+
+        The model dict preserves insertion order (guaranteed since Python 3.7),
+        so we can use its key order, minus anything in :attr:`.NON_CATEGORY_FIELDS`,
+        to determine the category order implied by the order arguments were passed.
+ """
+ if "categories" not in model:
+ categories = [
+ k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+ ]
+ model["categories"] = categories
+ else:
+ # add any columns not explicitly given an order at the end
+ categories = [
+ k
+ for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+ and not k.endswith("_index")
+ and k not in model["categories"]
+ ]
+ model["categories"].extend(categories)
+ return model
+
+ @model_validator(mode="after")
+ def resolve_targets(self) -> "DynamicTableMixin":
+ """
+ Ensure that any implicitly indexed columns are linked, and create backlinks
+ """
+ for key, col in self._categories.items():
+ if isinstance(col, VectorData):
+ # find an index
+ idx = None
+ for field_name in self.model_fields_set:
+ if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+ continue
+ # implicit name-based index
+ field = getattr(self, field_name)
+ if isinstance(field, VectorIndex) and (
+ field_name == f"{key}_index" or field.target is col
+ ):
+ idx = field
+ break
+ if idx is not None:
+ col._index = idx
+ idx.target = col
+ return self
+
+ @model_validator(mode="after")
+ def ensure_equal_length_cols(self) -> "DynamicTableMixin":
+ """
+ Ensure that all columns are equal length
+ """
+ lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all(length == lengths[0] for length in lengths), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+ )
+ return self
+
+
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -61,7 +797,7 @@ linkml_meta = LinkMLMeta(
)
-class VectorData(Data):
+class VectorData(VectorDataMixin):
"""
An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
"""
@@ -71,10 +807,8 @@ class VectorData(Data):
)
name: str = Field(...)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -84,7 +818,7 @@ class VectorData(Data):
] = Field(None)
-class VectorIndex(VectorData):
+class VectorIndex(VectorIndexMixin):
"""
Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
"""
@@ -97,10 +831,8 @@ class VectorIndex(VectorData):
target: Optional[VectorData] = Field(
None, description="""Reference to the target dataset that this index applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -124,7 +856,7 @@ class ElementIdentifiers(Data):
)
-class DynamicTableRegion(VectorData):
+class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
"""
DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
"""
@@ -134,13 +866,13 @@ class DynamicTableRegion(VectorData):
)
name: str = Field(...)
- table: Optional[DynamicTable] = Field(
- None, description="""Reference to the DynamicTable object that this region applies to."""
+ table: DynamicTable = Field(
+ ..., description="""Reference to the DynamicTable object that this region applies to."""
)
- description: Optional[str] = Field(
- None, description="""Description of what this table region points to."""
+ description: str = Field(
+ ..., description="""Description of what this table region points to."""
)
- array: Optional[
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
@@ -150,7 +882,7 @@ class DynamicTableRegion(VectorData):
] = Field(None)
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
"""
A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
"""
@@ -160,14 +892,12 @@ class DynamicTable(Container):
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
@@ -177,7 +907,7 @@ class DynamicTable(Container):
)
-class AlignedDynamicTable(DynamicTable):
+class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable):
"""
DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
"""
@@ -186,18 +916,16 @@ class AlignedDynamicTable(DynamicTable):
{"from_schema": "hdmf-common.table", "tree_root": True}
)
- children: Optional[List[DynamicTable]] = Field(
+ value: Optional[List[DynamicTable]] = Field(
None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}}
)
name: str = Field(...)
- colnames: Optional[str] = Field(
- None,
+ colnames: List[str] = Field(
+ ...,
description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
)
- description: Optional[str] = Field(
- None, description="""Description of what is in this dynamic table."""
- )
- id: NDArray[Shape["* num_rows"], int] = Field(
+ description: str = Field(..., description="""Description of what is in this dynamic table.""")
+ id: VectorData[NDArray[Shape["* num_rows"], int]] = Field(
...,
description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py
index 8b9bf5b..66dcf89 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py
@@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
index 065f135..9a95aea 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py
@@ -7,7 +7,7 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData
+from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData
from numpydantic import NDArray, Shape
metamodel_version = "None"
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -55,7 +64,7 @@ linkml_meta = LinkMLMeta(
},
"default_prefix": "hdmf-experimental.experimental/",
"id": "hdmf-experimental.experimental",
- "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"],
+ "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.experimental",
}
)
@@ -71,14 +80,12 @@ class EnumData(VectorData):
)
name: str = Field(...)
- elements: Optional[VectorData] = Field(
- None,
+ elements: VectorData = Field(
+ ...,
description="""Reference to the VectorData object that contains the enumerable elements""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
index db8a186..fdeb151 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py
@@ -7,7 +7,8 @@ import sys
from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
-from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data
+from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "0.1.0"
@@ -27,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -45,6 +55,7 @@ class LinkMLMeta(RootModel):
return key in self.root
+NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -53,7 +64,7 @@ linkml_meta = LinkMLMeta(
},
"default_prefix": "hdmf-experimental.resources/",
"id": "hdmf-experimental.resources",
- "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"],
+ "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"],
"name": "hdmf-experimental.resources",
}
)
@@ -99,9 +110,10 @@ class ExternalResourcesKeys(Data):
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
- key: str = Field(
+ key: NDArray[Shape["*"], str] = Field(
...,
description="""The user term that maps to one or more resources in the 'resources' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -118,12 +130,25 @@ class ExternalResourcesEntities(Data):
"linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
},
)
- keys_idx: np.uint64 = Field(..., description="""The index to the key in the 'keys' table.""")
- resources_idx: np.uint64 = Field(..., description="""The index into the 'resources' table""")
- entity_id: str = Field(..., description="""The unique identifier entity.""")
- entity_uri: str = Field(
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the key in the 'keys' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resources_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index into the 'resources' table""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The unique identifier entity.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_uri: NDArray[Shape["*"], str] = Field(
...,
description="""The URI for the entity this reference applies to. This can be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -140,9 +165,15 @@ class ExternalResourcesResources(Data):
"linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
},
)
- resource: str = Field(..., description="""The name of the resource.""")
- resource_uri: str = Field(
- ..., description="""The URI for the resource. This can be an empty string."""
+ resource: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The name of the resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resource_uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the resource. This can be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -159,10 +190,15 @@ class ExternalResourcesObjects(Data):
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
- object_id: str = Field(..., description="""The UUID for the object.""")
- field: str = Field(
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The UUID for the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ field: NDArray[Shape["*"], str] = Field(
...,
description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -179,10 +215,16 @@ class ExternalResourcesObjectKeys(Data):
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
- objects_idx: np.uint64 = Field(
- ..., description="""The index to the 'objects' table for the object that holds the key."""
+ objects_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'objects' table for the object that holds the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'keys' table for the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- keys_idx: np.uint64 = Field(..., description="""The index to the 'keys' table for the key.""")
# Model rebuild
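
The hunks above add a `__getitem__` to every generated `ConfiguredBaseModel`, so a model can be indexed straight through its `value` or `data` field. A minimal runnable sketch of that behavior, using a hypothetical `Indexable` stand-in rather than one of the generated classes:

```python
# Minimal sketch of the new ConfiguredBaseModel.__getitem__ behavior.
# `Indexable` is a hypothetical stand-in, not a generated nwb_linkml class.
from typing import Any, Optional, Union

import numpy as np
from pydantic import BaseModel, ConfigDict


class Indexable(BaseModel):
    model_config = ConfigDict(arbitrary_types_allowed=True)
    value: Optional[np.ndarray] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # Same fallthrough as the generated models: "value" first, then "data"
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


m = Indexable(value=np.array([1, 2, 3]))
assert m[0] == 1
assert list(m[1:]) == [2, 3]
```

Note that `KeyError` is raised when neither field is set, so only models that actually carry a `value` or `data` slot (e.g. `EnumData` below) are indexable.
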
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
index 69ffad1..a9c5e62 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py
@@ -15,15 +15,14 @@ from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import (
ExternalResourcesObjects,
ExternalResourcesObjectKeys,
)
-from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
-from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer
-from ...hdmf_common.v1_5_0.hdmf_common_table import (
+from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_4_0.hdmf_common_table import (
VectorData,
VectorIndex,
ElementIdentifiers,
DynamicTableRegion,
DynamicTable,
- AlignedDynamicTable,
)
from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData
@@ -45,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py
new file mode 100644
index 0000000..01b7693
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.experimental/",
+ "id": "hdmf-experimental.experimental",
+ "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.experimental",
+ }
+)
+
+
+class EnumData(VectorData):
+ """
+ Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.experimental", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ elements: VectorData = Field(
+ ...,
+ description="""Reference to the VectorData object that contains the enumerable elements""",
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+EnumData.model_rebuild()
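
As the `EnumData` docstring above says, a stored value of `i` points at the i-th entry of the `elements` vector. A toy decoding of that rule, with plain numpy arrays standing in for the generated `EnumData`/`VectorData` models:

```python
# Toy decoding of the EnumData rule: value[i] indexes into elements.
# Plain arrays stand in for the generated EnumData/VectorData models.
import numpy as np

elements = np.array(["grating", "natural_scene"])  # the enumerable values
codes = np.array([0, 1, 1, 0])                     # what EnumData.value stores
decoded = elements[codes]
print(decoded.tolist())  # ['grating', 'natural_scene', 'natural_scene', 'grating']
```
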
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
new file mode 100644
index 0000000..81b0840
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
@@ -0,0 +1,242 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.resources/",
+ "id": "hdmf-experimental.resources",
+ "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.resources",
+ }
+)
+
+
+class ExternalResources(Container):
+ """
+ A set of five tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.resources", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ keys: ExternalResourcesKeys = Field(
+ ...,
+ description="""A table for storing user terms that are used to refer to external resources.""",
+ )
+ entities: ExternalResourcesEntities = Field(
+ ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+ )
+ resources: ExternalResourcesResources = Field(
+ ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+ )
+ objects: ExternalResourcesObjects = Field(
+ ...,
+ description="""A table for identifying which objects in a file contain references to external resources.""",
+ )
+ object_keys: ExternalResourcesObjectKeys = Field(
+ ..., description="""A table for identifying which objects use which keys."""
+ )
+
+
+class ExternalResourcesKeys(Data):
+ """
+ A table for storing user terms that are used to refer to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["keys"] = Field(
+ "keys",
+ json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
+ )
+ key: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The user term that maps to one or more resources in the 'resources' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesEntities(Data):
+ """
+ A table for mapping user terms (i.e., keys) to resource entities.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["entities"] = Field(
+ "entities",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
+ },
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the key in the 'keys' table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resources_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index into the 'resources' table""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The unique identifier entity.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the entity this reference applies to. This can be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesResources(Data):
+ """
+ A table for mapping user terms (i.e., keys) to resource entities.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["resources"] = Field(
+ "resources",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
+ },
+ )
+ resource: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The name of the resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ resource_uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the resource. This can be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjects(Data):
+ """
+ A table for identifying which objects in a file contain references to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["objects"] = Field(
+ "objects",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
+ },
+ )
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The UUID for the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ relative_path: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ field: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The field of the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjectKeys(Data):
+ """
+ A table for identifying which objects use which keys.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["object_keys"] = Field(
+ "object_keys",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
+ },
+ )
+ objects_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'objects' table for the object that holds the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The index to the 'keys' table for the key.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+ExternalResources.model_rebuild()
+ExternalResourcesKeys.model_rebuild()
+ExternalResourcesEntities.model_rebuild()
+ExternalResourcesResources.model_rebuild()
+ExternalResourcesObjects.model_rebuild()
+ExternalResourcesObjectKeys.model_rebuild()
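
Every per-row column above is now annotated `NDArray[Shape["*"], ...]`: a one-dimensional array of any length, validated by numpydantic (pinned as `NUMPYDANTIC_VERSION = "1.2.1"`). A sketch of what that annotation accepts and rejects, assuming numpydantic's documented pydantic integration and using a hypothetical `Keys` stand-in for `ExternalResourcesKeys`:

```python
# Sketch of the NDArray[Shape["*"], str] annotation used for these columns.
# `Keys` is a hypothetical stand-in for the generated ExternalResourcesKeys.
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ValidationError


class Keys(BaseModel):
    key: NDArray[Shape["*"], str]


Keys(key=np.array(["human", "mouse"]))  # ok: 1-D str array, any length

try:
    Keys(key=np.array([["a"], ["b"]]))  # 2-D array fails the Shape["*"] check
except ValidationError as e:
    print("rejected:", e.error_count(), "error(s)")
```
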
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
new file mode 100644
index 0000000..5c8e028
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
@@ -0,0 +1,98 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_experimental.v0_2_0.hdmf_experimental_resources import (
+ ExternalResources,
+ ExternalResourcesKeys,
+ ExternalResourcesEntities,
+ ExternalResourcesResources,
+ ExternalResourcesObjects,
+ ExternalResourcesObjectKeys,
+)
+from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_5_1.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+from ...hdmf_experimental.v0_2_0.hdmf_experimental_experimental import EnumData
+
+metamodel_version = "None"
+version = "0.2.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental/",
+ "description": (
+ "Experimental data structures provided by HDMF. These are not "
+ "guaranteed to be available in the future."
+ ),
+ "id": "hdmf-experimental",
+ "imports": [
+ "hdmf-experimental.experimental",
+ "hdmf-experimental.resources",
+ "hdmf-experimental.nwb.language",
+ ],
+ "name": "hdmf-experimental",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
new file mode 100644
index 0000000..effed9e
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.experimental/",
+ "id": "hdmf-experimental.experimental",
+ "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.experimental",
+ }
+)
+
+
+class EnumData(VectorData):
+ """
+ Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.experimental", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ elements: VectorData = Field(
+ ...,
+ description="""Reference to the VectorData object that contains the enumerable elements""",
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+EnumData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py
new file mode 100644
index 0000000..52e554a
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py
@@ -0,0 +1,240 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.resources/",
+ "id": "hdmf-experimental.resources",
+ "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.resources",
+ }
+)
+
+
+class ExternalResources(Container):
+ """
+ A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.resources", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ keys: ExternalResourcesKeys = Field(
+ ...,
+ description="""A table for storing user terms that are used to refer to external resources.""",
+ )
+ files: ExternalResourcesFiles = Field(
+ ..., description="""A table for storing object ids of files used in external resources."""
+ )
+ entities: ExternalResourcesEntities = Field(
+ ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+ )
+ objects: ExternalResourcesObjects = Field(
+ ...,
+ description="""A table for identifying which objects in a file contain references to external resources.""",
+ )
+ object_keys: ExternalResourcesObjectKeys = Field(
+ ..., description="""A table for identifying which objects use which keys."""
+ )
+
+
+class ExternalResourcesKeys(Data):
+ """
+ A table for storing user terms that are used to refer to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["keys"] = Field(
+ "keys",
+ json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
+ )
+ key: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesFiles(Data):
+ """
+ A table for storing object ids of files used in external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["files"] = Field(
+ "files",
+ json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
+ )
+ file_object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The object id (UUID) of a file that contains objects that refers to external resources.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesEntities(Data):
+ """
+ A table for mapping user terms (i.e., keys) to resource entities.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["entities"] = Field(
+ "entities",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
+ },
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjects(Data):
+ """
+ A table for identifying which objects in a file contain references to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["objects"] = Field(
+ "objects",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
+ },
+ )
+ files_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the file in the `files` table containing the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The object id (UUID) of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ object_type: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The data type of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ relative_path: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ field: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjectKeys(Data):
+ """
+ A table for identifying which objects use which keys.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["object_keys"] = Field(
+ "object_keys",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
+ },
+ )
+ objects_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the object in the `objects` table that holds the key""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+ExternalResources.model_rebuild()
+ExternalResourcesKeys.model_rebuild()
+ExternalResourcesFiles.model_rebuild()
+ExternalResourcesEntities.model_rebuild()
+ExternalResourcesObjects.model_rebuild()
+ExternalResourcesObjectKeys.model_rebuild()
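
v0.3.0 introduces the `files` table, and each `objects` row now carries a `files_idx` pointing at the file that holds the object. A toy resolution of that link, with plain arrays standing in for the generated tables:

```python
# Toy resolution of objects.files_idx -> files.file_object_id (v0.3.0 layout).
import numpy as np

file_object_id = np.array(["aaaa-1111", "bbbb-2222"])  # files table (toy ids)
files_idx = np.array([1, 0, 1])                        # one entry per objects row

for row, idx in enumerate(files_idx):
    print(f"objects row {row} lives in file {file_object_id[idx]}")
```
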
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py
new file mode 100644
index 0000000..bf78d15
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py
@@ -0,0 +1,98 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_experimental.v0_3_0.hdmf_experimental_resources import (
+ ExternalResources,
+ ExternalResourcesKeys,
+ ExternalResourcesFiles,
+ ExternalResourcesEntities,
+ ExternalResourcesObjects,
+ ExternalResourcesObjectKeys,
+)
+from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_6_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+from ...hdmf_experimental.v0_3_0.hdmf_experimental_experimental import EnumData
+
+metamodel_version = "None"
+version = "0.3.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental/",
+ "description": (
+ "Experimental data structures provided by HDMF. These are not "
+ "guaranteed to be available in the future."
+ ),
+ "id": "hdmf-experimental",
+ "imports": [
+ "hdmf-experimental.experimental",
+ "hdmf-experimental.resources",
+ "hdmf-experimental.nwb.language",
+ ],
+ "name": "hdmf-experimental",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py
@@ -0,0 +1 @@
+
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py
new file mode 100644
index 0000000..49f56f2
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.experimental/",
+ "id": "hdmf-experimental.experimental",
+ "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.experimental",
+ }
+)
+
+
+class EnumData(VectorData):
+ """
+ Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.experimental", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ elements: VectorData = Field(
+ ...,
+ description="""Reference to the VectorData object that contains the enumerable elements""",
+ )
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
+ Union[
+ NDArray[Shape["* dim0"], Any],
+ NDArray[Shape["* dim0, * dim1"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+ NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+ ]
+ ] = Field(None)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+EnumData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py
new file mode 100644
index 0000000..acfc7df
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py
@@ -0,0 +1,264 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "0.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": False},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental.resources/",
+ "id": "hdmf-experimental.resources",
+ "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"],
+ "name": "hdmf-experimental.resources",
+ }
+)
+
+
+class ExternalResources(Container):
+ """
+ A set of six tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+ {"from_schema": "hdmf-experimental.resources", "tree_root": True}
+ )
+
+ name: str = Field(...)
+ keys: ExternalResourcesKeys = Field(
+ ...,
+ description="""A table for storing user terms that are used to refer to external resources.""",
+ )
+ files: ExternalResourcesFiles = Field(
+ ..., description="""A table for storing object ids of files used in external resources."""
+ )
+ entities: ExternalResourcesEntities = Field(
+ ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+ )
+ objects: ExternalResourcesObjects = Field(
+ ...,
+ description="""A table for identifying which objects in a file contain references to external resources.""",
+ )
+ object_keys: ExternalResourcesObjectKeys = Field(
+ ..., description="""A table for identifying which objects use which keys."""
+ )
+ entity_keys: ExternalResourcesEntityKeys = Field(
+ ..., description="""A table for identifying which keys use which entity."""
+ )
+
+
+class ExternalResourcesKeys(Data):
+ """
+ A table for storing user terms that are used to refer to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["keys"] = Field(
+ "keys",
+ json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
+ )
+ key: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesFiles(Data):
+ """
+ A table for storing object ids of files used in external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["files"] = Field(
+ "files",
+ json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
+ )
+ file_object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The object id (UUID) of a file that contains objects that refers to external resources.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesEntities(Data):
+ """
+ A table for mapping user terms (i.e., keys) to resource entities.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["entities"] = Field(
+ "entities",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
+ },
+ )
+ entity_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ entity_uri: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjects(Data):
+ """
+ A table for identifying which objects in a file contain references to external resources.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["objects"] = Field(
+ "objects",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
+ },
+ )
+ files_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the file in the `files` table containing the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The object id (UUID) of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ object_type: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The data type of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ relative_path: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ field: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesObjectKeys(Data):
+ """
+ A table for identifying which objects use which keys.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["object_keys"] = Field(
+ "object_keys",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
+ },
+ )
+ objects_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the object in the `objects` table that holds the key""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+class ExternalResourcesEntityKeys(Data):
+ """
+ A table for identifying which keys use which entity.
+ """
+
+ linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+ name: Literal["entity_keys"] = Field(
+ "entity_keys",
+ json_schema_extra={
+ "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"}
+ },
+ )
+ entities_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the entity in the `entities` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+ExternalResources.model_rebuild()
+ExternalResourcesKeys.model_rebuild()
+ExternalResourcesFiles.model_rebuild()
+ExternalResourcesEntities.model_rebuild()
+ExternalResourcesObjects.model_rebuild()
+ExternalResourcesObjectKeys.model_rebuild()
+ExternalResourcesEntityKeys.model_rebuild()
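
Where earlier versions kept `keys_idx`/`resources_idx` columns on each entity row, v0.4.0 factors the key-entity relation out into the `entity_keys` join table. A toy lookup that follows a key to its entity through that join, again with plain arrays standing in for the generated tables:

```python
# Toy key -> entity lookup through the v0.4.0 entity_keys join table.
import numpy as np

key = np.array(["human"])                  # keys table
entity_id = np.array(["NCBI_TAXON:9606"])  # entities table
entities_idx = np.array([0])               # entity_keys: row index into entities
keys_idx = np.array([0])                   # entity_keys: row index into keys

target = 0  # row of "human" in the keys table
for join_row in np.where(keys_idx == target)[0]:
    print(key[target], "->", entity_id[entities_idx[join_row]])
```
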
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py
new file mode 100644
index 0000000..2422a59
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py
@@ -0,0 +1,99 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
+import numpy as np
+from ...hdmf_experimental.v0_4_0.hdmf_experimental_resources import (
+ ExternalResources,
+ ExternalResourcesKeys,
+ ExternalResourcesFiles,
+ ExternalResourcesEntities,
+ ExternalResourcesObjects,
+ ExternalResourcesObjectKeys,
+ ExternalResourcesEntityKeys,
+)
+from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData
+from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer
+from ...hdmf_common.v1_7_0.hdmf_common_table import (
+ VectorData,
+ VectorIndex,
+ ElementIdentifiers,
+ DynamicTableRegion,
+ DynamicTable,
+ AlignedDynamicTable,
+)
+from ...hdmf_experimental.v0_4_0.hdmf_experimental_experimental import EnumData
+
+metamodel_version = "None"
+version = "0.4.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+ model_config = ConfigDict(
+ validate_assignment=True,
+ validate_default=True,
+ extra="forbid",
+ arbitrary_types_allowed=True,
+ use_enum_values=True,
+ strict=False,
+ )
+ hdf5_path: Optional[str] = Field(
+ None, description="The absolute path that this object is stored in an NWB file"
+ )
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
+
+class LinkMLMeta(RootModel):
+ root: Dict[str, Any] = {}
+ model_config = ConfigDict(frozen=True)
+
+ def __getattr__(self, key: str):
+ return getattr(self.root, key)
+
+ def __getitem__(self, key: str):
+ return self.root[key]
+
+ def __setitem__(self, key: str, value):
+ self.root[key] = value
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.root
+
+
+linkml_meta = LinkMLMeta(
+ {
+ "annotations": {
+ "is_namespace": {"tag": "is_namespace", "value": True},
+ "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
+ },
+ "default_prefix": "hdmf-experimental/",
+ "description": (
+ "Experimental data structures provided by HDMF. These are not "
+ "guaranteed to be available in the future."
+ ),
+ "id": "hdmf-experimental",
+ "imports": [
+ "hdmf-experimental.experimental",
+ "hdmf-experimental.resources",
+ "hdmf-experimental.nwb.language",
+ ],
+ "name": "hdmf-experimental",
+ }
+)
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
index 13d77f3..263e7b2 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -71,14 +80,12 @@ class EnumData(VectorData):
)
name: str = Field(...)
- elements: Optional[VectorData] = Field(
- None,
+ elements: VectorData = Field(
+ ...,
description="""Reference to the VectorData object that contains the enumerable elements""",
)
- description: Optional[str] = Field(
- None, description="""Description of what these vectors represent."""
- )
- array: Optional[
+ description: str = Field(..., description="""Description of what these vectors represent.""")
+ value: Optional[
Union[
NDArray[Shape["* dim0"], Any],
NDArray[Shape["* dim0, * dim1"], Any],
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
index cde0cca..9cbbb79 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
@@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
import numpy as np
from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data
+from numpydantic import NDArray, Shape
metamodel_version = "None"
version = "0.5.0"
@@ -27,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try and get a value from value or "data" if we have it"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
@@ -45,6 +55,7 @@ class LinkMLMeta(RootModel):
return key in self.root
+NUMPYDANTIC_VERSION = "1.2.1"
linkml_meta = LinkMLMeta(
{
"annotations": {
@@ -102,9 +113,10 @@ class HERDKeys(Data):
"keys",
json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
)
- key: str = Field(
+ key: NDArray[Shape["*"], str] = Field(
...,
description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -119,9 +131,10 @@ class HERDFiles(Data):
"files",
json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
)
- file_object_id: str = Field(
+ file_object_id: NDArray[Shape["*"], str] = Field(
...,
description="""The object id (UUID) of a file that contains objects that refers to external resources.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -138,13 +151,15 @@ class HERDEntities(Data):
"linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
},
)
- entity_id: str = Field(
+ entity_id: NDArray[Shape["*"], str] = Field(
...,
description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- entity_uri: str = Field(
+ entity_uri: NDArray[Shape["*"], str] = Field(
...,
description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -161,18 +176,30 @@ class HERDObjects(Data):
"linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
},
)
- files_idx: np.uint64 = Field(
- ..., description="""The row index to the file in the `files` table containing the object."""
+ files_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the file in the `files` table containing the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- object_id: str = Field(..., description="""The object id (UUID) of the object.""")
- object_type: str = Field(..., description="""The data type of the object.""")
- relative_path: str = Field(
+ object_id: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The object id (UUID) of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ object_type: NDArray[Shape["*"], str] = Field(
+ ...,
+ description="""The data type of the object.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+ )
+ relative_path: NDArray[Shape["*"], str] = Field(
...,
description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- field: str = Field(
+ field: NDArray[Shape["*"], str] = Field(
...,
description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -189,11 +216,15 @@ class HERDObjectKeys(Data):
"linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
},
)
- objects_idx: np.uint64 = Field(
- ..., description="""The row index to the object in the `objects` table that holds the key"""
+ objects_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the object in the `objects` table that holds the key""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- keys_idx: np.uint64 = Field(
- ..., description="""The row index to the key in the `keys` table."""
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
@@ -210,11 +241,15 @@ class HERDEntityKeys(Data):
"linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"}
},
)
- entities_idx: np.uint64 = Field(
- ..., description="""The row index to the entity in the `entities` table."""
+ entities_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the entity in the `entities` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
- keys_idx: np.uint64 = Field(
- ..., description="""The row index to the key in the `keys` table."""
+ keys_idx: NDArray[Shape["*"], int] = Field(
+ ...,
+ description="""The row index to the key in the `keys` table.""",
+ json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
)
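
The hunks above replace scalar `str`/`np.uint64` columns in the HERD tables with 1-D `numpydantic` arrays and give `ConfiguredBaseModel` a `__getitem__` passthrough. A minimal sketch of the resulting behavior, assuming pydantic v2 and numpydantic are installed; `Keys` is a stand-in for the generated `HERDKeys` model, not its verbatim output:

```python
from typing import Any, Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class Keys(BaseModel):
    # exact_number_dimensions: 1 in the schema becomes a 1-D Shape["*"] annotation
    key: NDArray[Shape["*"], str] = Field(...)
    value: Optional[NDArray[Shape["*"], str]] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        """Index into "value" or "data" when present, as in ConfiguredBaseModel"""
        if getattr(self, "value", None) is not None:
            return self.value[val]
        elif getattr(self, "data", None) is not None:
            return self.data[val]
        raise KeyError("No value or data field to index from")


keys = Keys(key=np.array(["human", "mouse"]), value=np.array(["a", "b"]))
assert keys.key.shape == (2,)  # a scalar here would fail validation
assert keys[0] == "a"          # __getitem__ falls through to .value
```
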
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
index 8f32985..e35c690 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py
@@ -46,6 +46,15 @@ class ConfiguredBaseModel(BaseModel):
)
object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+ def __getitem__(self, val: Union[int, slice]) -> Any:
+ """Try to get a value from the "value" or "data" field if we have one"""
+ if hasattr(self, "value") and self.value is not None:
+ return self.value[val]
+ elif hasattr(self, "data") and self.data is not None:
+ return self.data[val]
+ else:
+ raise KeyError("No value or data field to index from")
+
class LinkMLMeta(RootModel):
root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/monkeypatch.py b/nwb_linkml/src/nwb_linkml/monkeypatch.py
index d9da2c5..6222089 100644
--- a/nwb_linkml/src/nwb_linkml/monkeypatch.py
+++ b/nwb_linkml/src/nwb_linkml/monkeypatch.py
@@ -5,56 +5,6 @@ Monkeypatches to external modules
# ruff: noqa: ANN001 - not well defined types for this module
-def patch_npytyping_perf() -> None:
- """
- npytyping makes an expensive call to inspect.stack()
- that makes imports of pydantic models take ~200x longer than
- they should:
-
- References:
- - https://github.com/ramonhagenaars/nptyping/issues/110
- """
- import inspect
- from types import FrameType
-
- from nptyping import base_meta_classes, ndarray, recarray
- from nptyping.pandas_ import dataframe
-
- # make a new __module__ methods for the affected classes
- def new_module_ndarray(cls) -> str:
- return cls._get_module(inspect.currentframe(), "nptyping.ndarray")
-
- def new_module_recarray(cls) -> str:
- return cls._get_module(inspect.currentframe(), "nptyping.recarray")
-
- def new_module_dataframe(cls) -> str:
- return cls._get_module(inspect.currentframe(), "nptyping.pandas_.dataframe")
-
- # and a new _get_module method for the parent class
- def new_get_module(cls, stack: FrameType, module: str) -> str:
- return (
- "typing"
- if inspect.getframeinfo(stack.f_back).function == "formatannotation"
- else module
- )
-
- # now apply the patches
- ndarray.NDArrayMeta.__module__ = property(new_module_ndarray)
- recarray.RecArrayMeta.__module__ = property(new_module_recarray)
- dataframe.DataFrameMeta.__module__ = property(new_module_dataframe)
- base_meta_classes.SubscriptableMeta._get_module = new_get_module
-
-
-def patch_nptyping_warnings() -> None:
- """
- nptyping shits out a bunch of numpy deprecation warnings from using
- olde aliases
- """
- import warnings
-
- warnings.filterwarnings("ignore", category=DeprecationWarning, module="nptyping.*")
-
-
def patch_schemaview() -> None:
"""
Patch schemaview to correctly resolve multiple layers of relative imports.
@@ -211,8 +161,6 @@ def patch_pretty_print() -> None:
def apply_patches() -> None:
"""Apply all monkeypatches"""
- patch_npytyping_perf()
- patch_nptyping_warnings()
patch_schemaview()
patch_array_expression()
patch_pretty_print()
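
With the models now importing `numpydantic` instead of `nptyping`, the two nptyping workarounds above are retired and `apply_patches` shrinks to the three remaining patches. A sketch of the call site; whether nwb_linkml invokes this automatically at import time is an assumption here:

```python
from nwb_linkml.monkeypatch import apply_patches

# Now applies only patch_schemaview, patch_array_expression, and
# patch_pretty_print; the nptyping perf/warning patches are gone.
apply_patches()
```
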
diff --git a/nwb_linkml/src/nwb_linkml/providers/git.py b/nwb_linkml/src/nwb_linkml/providers/git.py
index 05ba68b..8219aaf 100644
--- a/nwb_linkml/src/nwb_linkml/providers/git.py
+++ b/nwb_linkml/src/nwb_linkml/providers/git.py
@@ -36,6 +36,14 @@ class NamespaceRepo(BaseModel):
),
default_factory=list,
)
+ imports: Optional[dict[str, Path]] = Field(
+ None,
+ description=(
+ "Any named imports that are included eg. as submodules within their repository. Dict"
+ " mapping schema name (used in the namespace field) to the namespace file relative to"
+ " the directory containing the **namespace.yaml file** (not the repo root)"
+ ),
+ )
def provide_from_git(self, commit: str | None = None) -> Path:
"""Provide a namespace file from a git repo"""
@@ -61,6 +69,7 @@ NWB_CORE_REPO = NamespaceRepo(
"2.6.0",
"2.7.0",
],
+ imports={"hdmf-common": Path("../hdmf-common-schema") / "common" / "namespace.yaml"},
)
HDMF_COMMON_REPO = NamespaceRepo(
@@ -86,7 +95,7 @@ HDMF_COMMON_REPO = NamespaceRepo(
DEFAULT_REPOS = {
repo.name: repo for repo in [NWB_CORE_REPO, HDMF_COMMON_REPO]
-} # type: Dict[str, NamespaceRepo]
+} # type: dict[str, NamespaceRepo]
class GitError(OSError):
@@ -112,7 +121,7 @@ class GitRepo:
self.namespace = namespace
self._commit = commit
- def _git_call(self, *args: List[str]) -> subprocess.CompletedProcess:
+ def _git_call(self, *args: str) -> subprocess.CompletedProcess:
res = subprocess.run(["git", "-C", self.temp_directory, *args], capture_output=True)
if res.returncode != 0:
raise GitError(
@@ -138,8 +147,11 @@ class GitRepo:
"""
URL for "origin" remote
"""
- res = self._git_call("remote", "get-url", "origin")
- return res.stdout.decode("utf-8").strip()
+ try:
+ res = self._git_call("remote", "get-url", "origin")
+ return res.stdout.decode("utf-8").strip()
+ except GitError:
+ return ""
@property
def active_commit(self) -> str:
@@ -157,6 +169,16 @@ class GitRepo:
"""
return self.temp_directory / self.namespace.path
+ @property
+ def import_namespaces(self) -> dict[str, Path]:
+ """
+ Absolute location of each of the imported namespaces specified in
+ :attr:`.NamespaceRepo.imports`
+ """
+ if self.namespace.imports is None:
+ return {}
+ return {k: (self.namespace_file / v).resolve() for k, v in self.namespace.imports.items()}
+
@property
def commit(self) -> Optional[str]:
"""
diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py
index 831bd2c..4af2bec 100644
--- a/nwb_linkml/src/nwb_linkml/providers/linkml.py
+++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py
@@ -3,8 +3,9 @@ Provider for LinkML schema built from NWB schema
"""
import shutil
+from dataclasses import dataclass
from pathlib import Path
-from typing import Dict, Optional, TypedDict
+from typing import Dict, Optional
from linkml_runtime import SchemaView
from linkml_runtime.dumpers import yaml_dumper
@@ -19,7 +20,8 @@ from nwb_linkml.ui import AdapterProgress
from nwb_schema_language import Namespaces
-class LinkMLSchemaBuild(TypedDict):
+@dataclass
+class LinkMLSchemaBuild:
"""Build result from :meth:`.LinkMLProvider.build`"""
version: str
@@ -283,4 +285,4 @@ class LinkMLProvider(Provider):
)
ns_file = ns_repo.provide_from_git(commit=version)
res = self.build_from_yaml(ns_file)
- return res[namespace]["namespace"]
+ return res[namespace].namespace
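
`LinkMLSchemaBuild` moving from a `TypedDict` to a dataclass is why the provider's return changes from `res[namespace]["namespace"]` to `res[namespace].namespace`. A sketch of the access-pattern change; any fields beyond `version` and `namespace` are omitted, and the `namespace` type is an assumption:

```python
from dataclasses import dataclass
from pathlib import Path


@dataclass
class LinkMLSchemaBuild:
    """Build result from LinkMLProvider.build"""
    version: str
    namespace: Path


build = LinkMLSchemaBuild(version="2.7.0", namespace=Path("core/namespace.yaml"))
print(build.namespace)  # attribute access
# build["namespace"]    # the old TypedDict-style access no longer works
```
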
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml
index 11d5b6f..f7d7f50 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml
@@ -38,12 +38,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -97,13 +99,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -188,22 +194,27 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -240,10 +251,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -268,8 +283,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -291,6 +306,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
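
The base-schema changes above make optionality explicit (`required:`), add `ifabsent` defaults, pin fixed attributes with `equals_string`, and rename the anonymous `array`/`children` slots to `value`. Based on the generated-model patterns visible elsewhere in this diff, a slot like `StartingTime.unit` is expected to compile to something like the following sketch (not the generator's verbatim output):

```python
from typing import Literal

from pydantic import BaseModel, Field


class TimeSeriesStartingTime(BaseModel):
    rate: float = Field(..., description="Sampling rate, in Hz.")  # required: true
    # ifabsent supplies the default; equals_string pins the only legal value
    unit: Literal["seconds"] = Field(
        "seconds",
        json_schema_extra={
            "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
        },
    )
    value: float = Field(...)


TimeSeriesStartingTime(rate=30.0, value=0.0)  # unit defaults to "seconds"
# TimeSeriesStartingTime(rate=30.0, value=0.0, unit="s")  # rejected by the Literal
```
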
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml
index f9539be..cb41d79 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml
index ec5c6bc..1b7492f 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml
index 758cec8..52f4c5a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml
@@ -52,6 +52,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -167,6 +170,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -211,6 +217,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -219,8 +236,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -242,8 +259,8 @@ classes:
the ElectricalSeries.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -257,8 +274,8 @@ classes:
properties should be noted in the ElectricalSeries description or comments field.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -278,18 +295,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -304,18 +334,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -364,6 +400,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
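
Throughout this file, `link`-typed slots gain a `source_type: link` annotation and an `any_of` over the target type and `string`, so a link can be satisfied either by an inlined object or by a string reference. A sketch of the expected generated shape; `Device` here is a stub, not the real model:

```python
from typing import Union

from pydantic import BaseModel, Field


class Device(BaseModel):
    name: str


class ElectrodeGroup(BaseModel):
    name: str
    device: Union[Device, str] = Field(
        ...,
        json_schema_extra={"linkml_meta": {"annotations": {"source_type": "link"}}},
    )


# Both forms validate: an inlined Device, or a string reference to one
ElectrodeGroup(name="shank0", device=Device(name="probe0"))
ElectrodeGroup(name="shank1", device="/general/devices/probe0")
```
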
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml
index bc5be75..2e5ca8e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
index 6a76bfa..6550555 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
@@ -37,7 +37,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.1.0)
range: text
+ required: true
+ equals_string: 2.1.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -192,14 +195,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -359,7 +357,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -408,13 +406,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -433,8 +431,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -448,6 +446,7 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
@@ -513,8 +512,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -532,11 +531,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -653,8 +652,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -683,3 +682,41 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
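
The four anonymous `TimeIntervals` ranges on `NWBFile.intervals` are replaced by a dedicated `NWBFile__intervals` class with named slots. A sketch of the resulting model shape; `TimeIntervals` is stubbed here and the class naming is illustrative:

```python
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    name: str


class NWBFileIntervals(BaseModel):
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # multivalued: true -> a list of additional interval tables
    time_intervals: Optional[List[TimeIntervals]] = None


intervals = NWBFileIntervals(trials=TimeIntervals(name="trials"))
```
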
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
index 6dde850..dc70464 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -187,7 +204,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -267,7 +287,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -285,7 +308,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -303,7 +329,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -322,7 +351,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -341,7 +373,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -360,7 +395,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -379,7 +417,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -398,7 +439,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -434,7 +478,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -492,6 +539,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -523,6 +581,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
index 4cab9c4..6920484 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
@@ -132,8 +132,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -151,6 +153,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -217,4 +230,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
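
Dropping the `repr: np.*` overrides means scalar slots fall back to plain LinkML base types (Python `int`/`float`) rather than numpy scalar types, with arrays handled by numpydantic instead; the `np.uint64` to `NDArray[Shape["*"], int]` changes earlier in this diff are the model-side counterpart. A small sketch of the after state, with `ObjectKeys` standing in for the generated class:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class ObjectKeys(BaseModel):
    # before: objects_idx: np.uint64 (scalar); after: a 1-D integer array
    objects_idx: NDArray[Shape["*"], int] = Field(...)


ObjectKeys(objects_idx=np.arange(3))  # validates as a 1-D integer array
```
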
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
index e0b5dea..4337a4e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -159,6 +161,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -175,9 +188,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -252,6 +267,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -268,6 +286,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -289,6 +310,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -299,6 +323,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -361,3 +388,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml
index 0ee27d7..4f62a54 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml
index ca16020..1183186 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -229,6 +245,17 @@ classes:
range: OpticalChannel
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImagingPlane__manifold:
name: ImagingPlane__manifold
@@ -252,14 +279,18 @@ classes:
x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
the 'conversion' multiplier to get from raw data acquisition pixel units
to meters is 2/1000.
+ ifabsent: float(1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: float32
any_of:
- array:
@@ -290,9 +321,11 @@ classes:
unit:
name: unit
description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: x_y
@@ -315,9 +348,11 @@ classes:
unit:
name: unit
description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: x_y
@@ -353,8 +388,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml
index 900025d..3150687 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml
@@ -28,12 +28,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -54,8 +58,9 @@ classes:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -77,19 +82,25 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
+ required: true
tree_root: true
ImagingRetinotopy:
name: ImagingRetinotopy
@@ -112,6 +123,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Phase response to stimulus on the first measured axis.
range: AxisMap
required: true
@@ -122,6 +136,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: AxisMap
@@ -133,6 +150,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Phase response to stimulus on the second measured axis.
range: AxisMap
required: true
@@ -143,6 +163,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Power response to stimulus on the second measured axis.
range: AxisMap
required: false
@@ -153,6 +176,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: RetinotopyMap
@@ -183,6 +209,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
range: RetinotopyImage
@@ -205,3 +234,4 @@ classes:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml
index 0b809d9..c81fea9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml
@@ -38,12 +38,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -97,13 +99,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -188,22 +194,27 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -240,10 +251,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -268,8 +283,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -291,6 +306,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml
index 1b5ce9c..c555f1a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml
index fdb85e6..e0969c8 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml
index 280381d..2244ea6 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml
@@ -52,6 +52,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -167,6 +170,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -211,6 +217,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -219,8 +236,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -242,8 +259,8 @@ classes:
the ElectricalSeries.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -257,8 +274,8 @@ classes:
properties should be noted in the ElectricalSeries description or comments field.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -278,18 +295,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -304,18 +334,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -364,6 +400,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml
index 2b2093c..e7e66c5 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml
index 44f6d85..2b01096 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml
@@ -37,7 +37,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.2.1)
range: text
+ required: true
+ equals_string: 2.2.1
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -192,14 +195,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -359,7 +357,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -408,13 +406,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -433,8 +431,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -448,6 +446,7 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
@@ -513,8 +512,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -532,11 +531,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -653,8 +652,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -683,3 +682,41 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
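Two moves recur throughout this file: fixed-value attributes gain the `ifabsent` / `required` / `equals_string` triple (here pinning `nwb_version` to the schema version), and the four-way `any_of` over `TimeIntervals` is replaced by an explicit `NWBFile__intervals` class with named `epochs` / `trials` / `invalid_times` / `time_intervals` slots. A hedged sketch of how the pinned-string pattern could surface in a generated pydantic model (class and field names are illustrative, not the generator's actual output):

```python
# Sketch, assuming equals_string + ifabsent map to a Literal with a default.
from typing import Literal
from pydantic import BaseModel, Field

class RootSketch(BaseModel):  # hypothetical container class
    nwb_version: Literal["2.2.1"] = Field(
        "2.2.1", description="Pinned file version string for this schema."
    )

print(RootSketch().nwb_version)        # -> "2.2.1"
# RootSketch(nwb_version="9.9.9")      # would fail validation
```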
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml
index ab4153b..11c4893 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -187,7 +204,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -267,7 +287,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -285,7 +308,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -303,7 +329,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -322,7 +351,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -341,7 +373,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -360,7 +395,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -379,7 +417,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -398,7 +439,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -434,7 +478,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -492,6 +539,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -523,6 +581,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
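The icephys hunks introduce the two `source_type` annotations used throughout this patch: `link` on slots that resolve to another object (declared as `any_of` over the target type and `string`, so a path reference is also valid), and `neurodata_type_inc` on slots that come from an included neurodata type. A sketch of the link pattern, assuming the union carries over to the generated pydantic model (class names are illustrative):

```python
# Sketch: a link-typed slot accepts the target object or a string reference,
# mirroring `any_of: [range: IntracellularElectrode, range: string]`.
from typing import Union
from pydantic import BaseModel

class IntracellularElectrode(BaseModel):  # stand-in for the generated class
    name: str

class SeriesSketch(BaseModel):            # hypothetical container
    electrode: Union[IntracellularElectrode, str]

SeriesSketch(electrode="/general/intracellular_ephys/electrode_0")
SeriesSketch(electrode=IntracellularElectrode(name="electrode_0"))
```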
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml
index 495afe3..7f406bc 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml
@@ -132,8 +132,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -151,6 +153,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -217,4 +230,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
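Every `core.nwb.language.yaml` in the patch drops the `repr: np.*` hints from the numeric types, leaving plain LinkML `typeof` mappings; presumably the numpy dtype mapping now lives in the generator rather than in the schema. Purely as illustration, an equivalent mapping kept in code might look like this (the dict name is assumed, not nwb_linkml's API):

```python
# Hypothetical stand-in for the removed `repr` hints.
import numpy as np

NWB_TO_NUMPY = {
    "float32": np.float32,
    "float64": np.float64,
    "int32": np.int32,
    "uint32": np.uint32,
    "isodatetime": np.datetime64,
}

assert NWB_TO_NUMPY["float32"](1.5) == np.float32(1.5)
```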
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml
index 758c683..59ff81b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -159,6 +161,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -175,9 +188,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -252,6 +267,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -268,6 +286,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -289,6 +310,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -299,6 +323,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -361,3 +388,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
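misc.yaml shows the other recurring rename: the implicit `array` attribute on dataset classes becomes `value`, and defaultable attributes gain `ifabsent` defaults (e.g. `string(no unit)`). A sketch of a dataset-with-attributes node after this change (the class name is illustrative):

```python
# Sketch: payload under `value`, unit defaulting per `ifabsent: string(no unit)`.
from pydantic import BaseModel

class DataSketch(BaseModel):         # hypothetical dataset class
    unit: str = "no unit"            # ifabsent default
    value: list[float] = []          # payload slot, renamed from `array`

d = DataSketch(value=[0.1, 0.2])
print(d.unit, d.value)               # -> no unit [0.1, 0.2]
```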
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml
index 2ffb3ae..61e640c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml
index cea4194..45df3d6 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -229,6 +245,17 @@ classes:
range: OpticalChannel
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImagingPlane__manifold:
name: ImagingPlane__manifold
@@ -252,14 +279,18 @@ classes:
x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
the 'conversion' multiplier to get from raw data acquisition pixel units
to meters is 2/1000.
+ ifabsent: float(1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. The default
value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: float32
any_of:
- array:
@@ -290,9 +321,11 @@ classes:
unit:
name: unit
description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: x_y
@@ -315,9 +348,11 @@ classes:
unit:
name: unit
description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: x_y
@@ -353,8 +388,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml
index bb1ba70..8543f50 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml
@@ -28,12 +28,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -54,8 +58,9 @@ classes:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -77,19 +82,25 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
+ required: true
tree_root: true
ImagingRetinotopy:
name: ImagingRetinotopy
@@ -112,6 +123,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Phase response to stimulus on the first measured axis.
range: AxisMap
required: true
@@ -122,6 +136,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Power response on the first measured axis. Response is scaled
so 0.0 is no power in the response and 1.0 is maximum relative power.
range: AxisMap
@@ -133,6 +150,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Phase response to stimulus on the second measured axis.
range: AxisMap
required: true
@@ -143,6 +163,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Power response to stimulus on the second measured axis.
range: AxisMap
required: false
@@ -153,6 +176,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Sine of the angle between the direction of the gradient in axis_1
and axis_2.
range: RetinotopyMap
@@ -183,6 +209,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: 'Gray-scale anatomical image of cortical surface. Array structure:
[rows][columns]'
range: RetinotopyImage
@@ -205,3 +234,4 @@ classes:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml
index 5ba1d46..771c828 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml
@@ -38,12 +38,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -97,13 +99,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -188,22 +194,27 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -240,10 +251,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -268,8 +283,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -291,6 +306,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml
index b644d87..b95e9da 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
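behavior.yaml repeats the container-group counterpart of that rename: `children` becomes `value` across all the Behavioral* interfaces, while the slot stays multivalued and inlined. A minimal sketch of the resulting shape (class names are illustrative):

```python
# Sketch: container groups expose their members under `value` (was `children`).
from pydantic import BaseModel

class TimeSeriesSketch(BaseModel):
    name: str

class BehavioralTimeSeriesSketch(BaseModel):  # hypothetical container
    value: list[TimeSeriesSketch] = []

bts = BehavioralTimeSeriesSketch(value=[TimeSeriesSketch(name="speed")])
print([ts.name for ts in bts.value])          # -> ['speed']
```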
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml
index a436b52..2f26c6a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml
index c96af97..70f1c6c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml
@@ -52,6 +52,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -167,6 +170,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -211,6 +217,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -219,8 +236,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -242,8 +259,8 @@ classes:
the ElectricalSeries.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -257,8 +274,8 @@ classes:
properties should be noted in the ElectricalSeries description or comments field.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -278,18 +295,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -304,18 +334,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -364,6 +400,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml
index 47fa3e4..6121bb5 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml
index bec9dfd..1186898 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml
@@ -37,7 +37,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.2.2)
range: text
+ required: true
+ equals_string: 2.2.2
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -192,14 +195,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -359,7 +357,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -408,13 +406,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -433,8 +431,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -448,6 +446,7 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
@@ -513,8 +512,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -532,11 +531,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -653,8 +652,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -683,3 +682,41 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml
index fb1361c..a9c82e9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -187,7 +204,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -267,7 +287,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -285,7 +308,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -303,7 +329,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -322,7 +351,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -341,7 +373,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -360,7 +395,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -379,7 +417,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -398,7 +439,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -434,7 +478,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -492,6 +539,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -523,6 +581,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml
index 71cf395..1e11ca4 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml
@@ -132,8 +132,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -151,6 +153,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -236,4 +249,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml
index 9f47099..de3d137 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -159,6 +161,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -175,9 +188,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -252,6 +267,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -268,6 +286,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -289,6 +310,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -299,6 +323,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -361,3 +388,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
index 08cb277..f281783 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
index 3eafce0..a5923c9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -141,14 +157,230 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz.
+ range: float32
+ required: true
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: NWBContainer
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - alias: x_y_z
+ exact_cardinality: 3
+ range: float32
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - alias: x_y_z
+ exact_cardinality: 3
+ range: float32
+ OpticalChannel:
+ name: OpticalChannel
+ description: An optical channel used to record from an imaging plane.
+ is_a: NWBContainer
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description or other notes about the channel.
+ range: text
+ required: true
+ multivalued: false
+ emission_lambda:
+ name: emission_lambda
+ description: Emission wavelength for channel, in nm.
+ range: float32
+ required: true
+ multivalued: false
MotionCorrection:
name: MotionCorrection
description: 'An image stack where all frames are shifted (registered) to a common
@@ -156,8 +388,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
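The largest structural change in this section: v2_2_2's `ImagingPlane` stops being an opaque `children: any_of [NWBContainer]` and enumerates its attributes directly (`excitation_lambda`, `indicator`, `location`, the deprecated `manifold`, `origin_coords`, `grid_spacing`, `reference_frame`, `optical_channel`, and a `device` link), with `ImagingPlane__manifold` / `ImagingPlane__origin_coords` / `ImagingPlane__grid_spacing` and `OpticalChannel` added as companion classes. A condensed sketch of the shape this implies (field subset only; names are illustrative, not generator output):

```python
# Condensed sketch of the enumerated ImagingPlane shape.
from typing import Optional
from pydantic import BaseModel

class OpticalChannelSketch(BaseModel):
    description: str
    emission_lambda: float           # nm

class ImagingPlaneSketch(BaseModel):
    excitation_lambda: float         # nm
    indicator: str
    location: str
    imaging_rate: float              # Hz
    description: Optional[str] = None
    optical_channel: list[OpticalChannelSketch]

ImagingPlaneSketch(
    excitation_lambda=488.0, indicator="GCaMP6f", location="V1",
    imaging_rate=30.0,
    optical_channel=[OpticalChannelSketch(description="green",
                                          emission_lambda=510.0)],
)
```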
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
index b4d0df4..5248fa1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
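The retinotopy hunks above show the second recurring rename: dataset classes that previously exposed their payload through an `array` slot now expose it as `value`, and constraints that were implicit before (`required: true`, `multivalued: true` on `dimension` and `field_of_view`) are stated outright. A hedged sketch of what one of these axis-map datasets might look like as a generated model; the payload is typed loosely as nested lists here, whereas the real models presumably use shaped array types:

```python
from typing import List
from pydantic import BaseModel, Field

class AxisMapSketch(BaseModel):
    """Sketch of a retinotopy axis-map dataset after the `array` -> `value` rename."""
    name: str
    dimension: List[int] = Field(..., description="Rows/columns; now explicitly required and multivalued")
    field_of_view: List[float] = Field(..., description="Size of viewing area, in meters")
    unit: str = Field(..., description="Unit that axis data is stored in")
    # The dataset payload itself, formerly the `array` slot:
    value: List[List[float]]  # num_rows x num_cols
```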
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml
index b9b7b76..6a09101 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml
@@ -38,12 +38,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -97,13 +99,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -188,22 +194,27 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -240,10 +251,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -268,8 +283,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -291,6 +306,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
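Two related annotations arrive in `core.nwb.base`: `ifabsent: string(...)` / `float(...)` records the schema default for optional attributes (`description`, `comments`, `conversion`, `resolution`), while fixed-value attributes such as the starting-time `unit` get both a default and an `equals_string` constraint. In pydantic terms that plausibly maps to a plain field default for the former and a `Literal` type for the latter; a sketch under that assumption, with class names abbreviated:

```python
from typing import Literal, Optional
from pydantic import BaseModel

class TimeSeriesSketch(BaseModel):
    """Illustrative only: ifabsent -> field default, equals_string -> Literal."""
    description: Optional[str] = "no description"  # ifabsent: string(no description)
    comments: Optional[str] = "no comments"        # ifabsent: string(no comments)
    conversion: Optional[float] = 1.0              # ifabsent: float(1.0)
    resolution: Optional[float] = -1.0             # ifabsent: float(-1.0)

class StartingTimeSketch(BaseModel):
    rate: float                                    # required: true
    unit: Literal["seconds"] = "seconds"           # equals_string: seconds
    value: float
```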
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml
index b2a980c..836b4eb 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml
index e8f28d6..62fc686 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml
index 9de48e5..166af8a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml
@@ -52,6 +52,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -167,6 +170,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -211,6 +217,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -219,8 +236,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -242,8 +259,8 @@ classes:
the ElectricalSeries.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -257,8 +274,8 @@ classes:
properties should be noted in the ElectricalSeries description or comments field.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -278,18 +295,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -304,18 +334,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -364,6 +400,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
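The ecephys changes introduce the link pattern used throughout the rest of this diff: NWB links become ordinary slots (`device` on ElectrodeGroup, `source_electricalseries`, a `clustering_interface` targeting Clustering) annotated with `source_type: link` and typed `any_of` the target class or a plain string, presumably so a model can hold either an inlined object or a path-like reference. Named `VectorIndex`/`DynamicTableRegion` slots additionally gain a `source_type: neurodata_type_inc` annotation recording how the slot arose in the NWB source. A sketch of the union type, with only the slots visible in this hunk:

```python
from typing import Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class ElectrodeGroupSketch(BaseModel):
    """Sketch: a link slot accepts the target object or a string reference."""
    name: str
    description: str
    location: str
    device: Union[Device, str]  # source_type: link; any_of Device | string

group = ElectrodeGroupSketch(
    name="shank0",
    description="tetrode",
    location="CA1",
    device="/general/devices/probe0",  # string form: an HDF5-style path reference
)
```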
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml
index 51e899c..68a7004 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
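In the epoch schema, the compound columns `idx_start`, `count`, and `timeseries` each gain `array: exact_number_dimensions: 1`, pinning them to one-dimensional storage rather than bare scalars, which matches how a compound column holds one entry per table row. A brief numpy illustration of the invariant and of how `idx_start`/`count` select a span of the referenced series, per the descriptions above:

```python
import numpy as np

# Each compound column is exactly 1-D: one entry per row of the table.
idx_start = np.array([0, 150, 400], dtype=np.int32)
count = np.array([150, 250, 80], dtype=np.int32)

assert idx_start.ndim == 1 and count.ndim == 1  # exact_number_dimensions: 1
# Row i selects samples [idx_start[i], idx_start[i] + count[i]) of the
# referenced TimeSeries 'data' / 'timestamps' datasets.
```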
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml
index 0c91895..45add51 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.2.4)
range: text
+ required: true
+ equals_string: 2.2.4
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -373,7 +372,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -422,13 +421,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -447,8 +446,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -462,12 +461,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -485,11 +485,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -606,8 +606,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -636,6 +636,44 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
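Three structural changes stand out in `core.nwb.file`: `nwb_version` is pinned per schema release via `ifabsent`/`equals_string` (2.2.4 here), the deeply nested `NWBFile__general__*` class names are shortened to `general__*`, and the `intervals` slot drops its four identical `any_of: TimeIntervals` entries in favor of a dedicated `NWBFile__intervals` class with named `epochs`, `trials`, `invalid_times`, and `time_intervals` slots. A sketch of the resulting intervals container, with `TimeIntervals` reduced to a placeholder:

```python
from typing import List, Literal, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel):
    """Placeholder for the generated DynamicTable subclass."""
    name: str

class NWBFileIntervals(BaseModel):
    """Sketch of NWBFile__intervals: named slots replace repeated any_of entries."""
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    time_intervals: Optional[List[TimeIntervals]] = None  # additional interval tables
```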
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml
index 83cf963..200456d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -187,7 +204,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -267,7 +287,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -285,7 +308,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -303,7 +329,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -322,7 +351,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -341,7 +373,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -360,7 +395,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -379,7 +417,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -398,7 +439,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -434,7 +478,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -492,6 +539,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -523,6 +581,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml
index dea4aa6..4beec01 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml
@@ -132,8 +132,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -151,6 +153,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -236,4 +249,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
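`core.nwb.language` drops every `repr: np.*` hint from the numeric and datetime types, leaving them to fall back to their `typeof` base (`float`, `integer`, `datetime`). The practical effect, if the generators honor it, would be that generated fields validate as plain Python scalars rather than being nudged toward numpy scalar types; a minimal before/after sketch under that assumption:

```python
import numpy as np
from pydantic import BaseModel

class WithRepr(BaseModel):
    """Old style (repr: np.float32): the field carries a numpy scalar type."""
    model_config = {"arbitrary_types_allowed": True}
    resolution: np.float32

class WithoutRepr(BaseModel):
    """New style: typeof float resolves to an ordinary Python float."""
    resolution: float

WithoutRepr(resolution=-1.0)  # validates as a plain float
```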
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml
index 01a3ce4..82d4a9f 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -159,6 +161,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -175,9 +188,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -252,6 +267,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -268,6 +286,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -289,6 +310,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -299,6 +323,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -361,3 +388,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml
index d158d51..ec81c05 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml
index 547d0d5..4d7568e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -158,6 +174,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -176,6 +195,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -196,6 +218,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -225,18 +258,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -256,24 +295,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -282,14 +329,210 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - alias: x_y_z
+ exact_cardinality: 3
+ range: float32
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - alias: x_y_z
+ exact_cardinality: 3
+ range: float32
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -319,8 +562,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -349,4 +592,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
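The largest single rewrite in this diff is `ImagingPlane` in `core.nwb.ophys`: the opaque `children` slot is replaced by the fully enumerated attribute set (`description`, `excitation_lambda`, `indicator`, `location`, the deprecated `manifold`, `origin_coords`, `grid_spacing`, `reference_frame`), `optical_channel` becomes a required multivalued slot, and `device` arrives as a link, with `manifold`/`origin_coords`/`grid_spacing` split out into their own `ImagingPlane__*` classes. An abbreviated sketch of the resulting shape, showing only a few slots:

```python
from typing import List, Optional, Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class OpticalChannel(BaseModel):
    name: str
    description: str
    emission_lambda: float

class ImagingPlaneSketch(BaseModel):
    """Abbreviated: the real class also carries manifold, origin_coords,
    grid_spacing, and reference_frame sub-structures."""
    name: str
    excitation_lambda: float               # required: true
    indicator: str                         # required: true
    location: str                          # required: true
    description: Optional[str] = None
    optical_channel: List[OpticalChannel]  # required and multivalued
    device: Union[Device, str]             # link slot: object or string reference
```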
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml
index 77858f0..5869564 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml
index dbf0921..477b107 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml
@@ -38,12 +38,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -97,13 +99,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -188,22 +194,27 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -240,10 +251,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -268,8 +283,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -291,6 +306,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml
index 5ecb8fb..97c06d8 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml
index 32b7d65..9572c3b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml
index 73148dd..d7e2d98 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml
@@ -52,6 +52,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -167,6 +170,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -211,6 +217,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -219,8 +236,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -242,8 +259,8 @@ classes:
the ElectricalSeries.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -257,8 +274,8 @@ classes:
properties should be noted in the ElectricalSeries description or comments field.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -278,18 +295,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -304,18 +334,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -364,6 +400,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml
index 13cf7c6..2b4fbde 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml
index 5974be3..f52421b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.2.5)
range: text
+ required: true
+ equals_string: 2.2.5
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -373,7 +372,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -422,13 +421,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -447,8 +446,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -462,12 +461,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -485,11 +485,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -606,8 +606,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -636,6 +636,44 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
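
Replacing the quadruple `any_of: [TimeIntervals, ...]` slot with a dedicated `NWBFile__intervals` class makes the `/intervals` group explicit: its name is pinned by `ifabsent`/`equals_string`, and the well-known children become named optional slots. Assuming `equals_string` maps to a `Literal` with a matching default, the generated model plausibly looks something like this sketch (simplified, with a stand-in `TimeIntervals`):

```python
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    """Stand-in for the generated hdmf TimeIntervals table."""
    name: str


class NWBFileIntervals(BaseModel):
    # ifabsent: string(intervals) + equals_string: intervals
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # time_intervals is multivalued: any number of extra tables
    time_intervals: Optional[List[TimeIntervals]] = None
```
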
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml
index 015c804..9eb505a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -187,7 +204,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -267,7 +287,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -285,7 +308,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -303,7 +329,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -322,7 +351,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -341,7 +373,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -360,7 +395,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -379,7 +417,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -398,7 +439,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -434,7 +478,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -492,6 +539,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -523,6 +581,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
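
A large share of this icephys diff is the same three-line pattern on `unit` attributes: `ifabsent: string(...)` supplies a default, `required: true` makes the attribute mandatory, and `equals_string: ...` locks its value. Under the same `Literal` assumption as above, a fixed-unit scalar dataset like the capacitance classes reduces to roughly this (illustrative, not the generated source):

```python
from typing import Literal, Optional

from pydantic import BaseModel


class VoltageClampSeriesCapacitanceSlow(BaseModel):
    """Sketch of a scalar dataset with a fixed unit: `unit` can
    only ever be 'farads', and it is present by default."""
    name: Literal["capacitance_slow"] = "capacitance_slow"
    unit: Literal["farads"] = "farads"
    value: Optional[float] = None


cap = VoltageClampSeriesCapacitanceSlow(value=1.2e-12)
assert cap.unit == "farads"
```
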
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml
index eaa5e52..4218d3b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml
@@ -132,8 +132,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -151,6 +153,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -236,4 +249,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
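
The only change to `core.nwb.language.yaml` is the removal of every numpy `repr` from the scalar type definitions, leaving just the base LinkML `typeof`. The practical effect, presumably, is that generated annotations fall back from numpy scalar types to the Python builtins; a small illustration of the two conventions (aliases are illustrative, not the generated code):

```python
import numpy as np

# With `repr: np.float32`, a float32 slot suggested a numpy-typed
# annotation:
ReprFloat32 = np.float32

# With the `repr` removed, only `typeof: float` remains, so the
# natural annotation is the builtin:
PlainFloat32 = float

assert isinstance(np.float32(1.5), ReprFloat32)
assert isinstance(1.5, PlainFloat32)
```
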
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml
index 115f5ac..f163348 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -159,6 +161,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -175,9 +188,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -252,6 +267,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -268,6 +286,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -289,6 +310,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -299,6 +323,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -361,3 +388,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml
index 2be6f23..08d7a0c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml
index 902cace..2424cb5 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -158,6 +174,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -176,6 +195,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -196,6 +218,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -225,18 +258,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -256,24 +295,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -282,14 +329,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -319,8 +568,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -349,4 +598,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
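
Across the behavior, ecephys, and ophys files, catch-all `children` attributes on container interfaces (Fluorescence, ImageSegmentation, the Behavioral* groups, and others) are renamed to `value`, keeping the multivalued, inlined-as-dict shape. A sketch of what that slot holds, with hypothetical names standing in for the generated models:

```python
from typing import Dict

from pydantic import BaseModel


class RoiResponseSeries(BaseModel):
    """Stand-in child type for the container."""
    name: str


class Fluorescence(BaseModel):
    # multivalued + inlined (not as a list): children are keyed by
    # name, and the slot is now called `value` instead of `children`.
    value: Dict[str, RoiResponseSeries] = {}


fluo = Fluorescence(value={"roi0": RoiResponseSeries(name="roi0")})
assert fluo.value["roi0"].name == "roi0"
```
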
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml
index c3acb3f..457b1cc 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml
index 9eff269..ab7eabf 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml
@@ -39,12 +39,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -98,13 +100,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -189,20 +195,25 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
+ required: true
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
@@ -215,8 +226,9 @@ classes:
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -253,10 +265,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -281,8 +297,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -304,6 +320,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
index 59a32d3..07bb957 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml
index c0f4e17..7881fcf 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
index 888caab..2863bba 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml
@@ -37,6 +37,7 @@ classes:
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
+ required: false
data:
name: data
description: Recorded voltage data.
@@ -62,6 +63,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -177,6 +181,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -221,6 +228,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -229,8 +247,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -252,8 +270,8 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -267,8 +285,8 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -288,18 +306,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -314,18 +345,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -374,6 +411,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
index 915eba7..ce14120 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml
index 9b3da02..0b76f4f 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.3.0)
range: text
+ required: true
+ equals_string: 2.3.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -373,7 +372,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -422,13 +421,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -447,8 +446,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -462,12 +461,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -485,11 +485,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -607,8 +607,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -637,6 +637,44 @@ classes:
range: SweepTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
index d100eb5..d93bb52 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -141,7 +158,10 @@ classes:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
+ ifabsent: string(N/A)
range: text
+ required: true
+ equals_string: N/A
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
@@ -192,7 +212,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -272,7 +295,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -290,7 +316,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -308,7 +337,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -327,7 +359,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -346,7 +381,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -365,7 +403,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -384,7 +425,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -403,7 +447,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -439,7 +486,10 @@ classes:
description: Base unit of measurement for working with the data, which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -497,6 +547,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -528,6 +589,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml
index 52b587d..bbbcfce 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml
@@ -104,6 +104,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImageSeries__external_file:
name: ImageSeries__external_file
@@ -133,8 +144,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -152,6 +165,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -237,4 +261,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
index 1e5e813..89d5ee0 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -158,6 +160,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
@@ -170,6 +175,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -186,9 +202,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -263,6 +281,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -279,6 +300,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -300,6 +324,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -310,6 +337,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -390,6 +420,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
@@ -401,6 +434,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
@@ -426,3 +462,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
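Two changes recur throughout this file: attributes with schema defaults gain `ifabsent` values (e.g. `string(no unit)` plus an explicit `required`), and the anonymous `array` slot is renamed to `value`. A hedged sketch of how one such class might surface in a generated model (illustrative shape, not the actual generator output):

```python
from typing import Optional
import numpy as np
from pydantic import BaseModel, Field

class DecompositionSeriesData(BaseModel):
    model_config = {"arbitrary_types_allowed": True}
    # `ifabsent: string(no unit)` + `required: true` -> populated default
    unit: str = Field(default="no unit")
    # the anonymous `array` slot is now a named `value` slot
    value: Optional[np.ndarray] = None
```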
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
index 48992da..3148b98 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
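The added `site` and `device` slots show the pattern used for links in all of these files: an annotation `source_type: link` plus an `any_of` over the target type and `string`, so the slot can hold either the object itself or a path-like reference. A minimal sketch of that union, assuming a pydantic-style model:

```python
from typing import Union
from pydantic import BaseModel

class OptogeneticStimulusSite(BaseModel):
    name: str

class OptogeneticSeries(BaseModel):
    # `any_of: [range: OptogeneticStimulusSite, range: string]`
    site: Union[OptogeneticStimulusSite, str]

# either form validates:
OptogeneticSeries(site=OptogeneticStimulusSite(name="site0"))
OptogeneticSeries(site="/general/optogenetics/site0")
```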
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
index 53f031e..b208d50 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -158,6 +174,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -176,6 +195,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -196,6 +218,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -225,18 +258,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -256,24 +295,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -282,14 +329,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -319,8 +568,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -349,4 +598,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
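ImagingPlane is expanded here from a single `children` slot into explicit attributes, and its anonymous subgroups become named classes (`ImagingPlane__manifold`, `__origin_coords`, `__grid_spacing`) whose `name` is pinned by `ifabsent` plus `equals_string`. One way a generator could encode a pinned name and the scalar defaults — a sketch under that assumption only:

```python
from typing import Literal
from pydantic import BaseModel

class ImagingPlaneManifold(BaseModel):
    # `ifabsent: string(manifold)` + `equals_string: manifold`
    name: Literal["manifold"] = "manifold"
    conversion: float = 1.0   # ifabsent: float(1.0)
    unit: str = "meters"      # ifabsent: string(meters)
```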
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
index 35c4f49..cc06e90 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml
index 8d225c1..1d817de 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml
@@ -41,6 +41,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
@@ -48,12 +50,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: true
multivalued: false
@@ -73,12 +79,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -132,13 +140,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -223,20 +235,25 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
+ required: true
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
@@ -249,8 +266,9 @@ classes:
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -287,10 +305,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -315,8 +337,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -338,6 +360,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
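In the hunk above, the `idx_start`, `count`, and `timeseries` columns each gain `array: exact_number_dimensions: 1`, making explicit that every column of the compound reference type is a flat vector where each row selects a window of the referenced series by start index and sample count. A small illustration of that selection semantics (plain numpy, not library code):

```python
import numpy as np

data = np.arange(100)            # stand-in for a TimeSeries 'data' array
idx_start, count = 10, 5         # one row of the compound columns
window = data[idx_start:idx_start + count]
assert window.tolist() == [10, 11, 12, 13, 14]
```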
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
index 27895f5..322a4d9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml
@@ -61,9 +61,11 @@ classes:
description: Base unit of measurement for working with the data. The default
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -89,8 +91,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -103,8 +105,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -117,8 +119,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -130,8 +132,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -143,8 +145,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -160,8 +162,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -173,8 +175,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml
index fda15e6..fc320af 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
index 00ebe6f..dd93758 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml
@@ -37,6 +37,7 @@ classes:
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
+ required: false
data:
name: data
description: Recorded voltage data.
@@ -62,6 +63,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -177,6 +181,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -221,6 +228,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -229,8 +247,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -252,8 +270,8 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -267,8 +285,8 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -288,18 +306,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -314,18 +345,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -374,6 +411,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
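Alongside the existing `named` tag, slots originating from `neurodata_type_inc` datasets now carry a `source_type` annotation, mirroring the `source_type: link` used for link slots. A hedged sketch of how a consumer might branch on it when resolving values; `lookup_by_path` is a hypothetical stand-in resolver, not an nwb_linkml API:

```python
def lookup_by_path(path: str) -> object:
    # hypothetical: would fetch the object at an HDF5-style path
    return {"resolved": path}

def resolve(value: object, source_type: str) -> object:
    # strings on a `source_type: link` slot are references to follow;
    # anything else (including inc'd datasets) is used as-is
    if source_type == "link" and isinstance(value, str):
        return lookup_by_path(value)
    return value

print(resolve("/acquisition/series1", "link"))
```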
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
index 19ce0d0..c3fb2cb 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -73,6 +76,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
@@ -94,6 +100,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
@@ -101,12 +109,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch.
+ array:
+ exact_number_dimensions: 1
range: int32
required: false
multivalued: false
timeseries:
name: timeseries
description: the TimeSeries that this index applies to.
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: false
multivalued: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml
index ed8fdda..13bf8a1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.4.0)
range: text
+ required: true
+ equals_string: 2.4.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -373,7 +372,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -422,13 +421,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -447,8 +446,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -462,12 +461,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -485,11 +485,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -607,8 +607,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -691,6 +691,44 @@ classes:
range: ExperimentalConditionsTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
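Two structural changes stand out in this file: the four-way `any_of` over `TimeIntervals` is replaced by a dedicated `NWBFile__intervals` class whose children (`epochs`, `trials`, `invalid_times`, `time_intervals`) are named optional slots, and the `NWBFile__general__*` helper classes are shortened to `general__*`. A sketch of the resulting container shape (illustrative only):

```python
from typing import List, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel): ...

class NWBFileIntervals(BaseModel):
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # `multivalued: true` -> a list of extra interval tables
    time_intervals: Optional[List[TimeIntervals]] = None
```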
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
index 2150103..346751e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -120,7 +134,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -141,7 +158,10 @@ classes:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
+ ifabsent: string(N/A)
range: text
+ required: true
+ equals_string: N/A
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
@@ -192,7 +212,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -272,7 +295,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -290,7 +316,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -308,7 +337,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -327,7 +359,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -346,7 +381,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -365,7 +403,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -384,7 +425,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -403,7 +447,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -439,7 +486,10 @@ classes:
description: Base unit of measurement for working with the data. which is
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -497,6 +547,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -531,6 +592,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
@@ -548,7 +612,10 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular electrode related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular electrode related metadata.
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
@@ -568,13 +635,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular stimulus related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular stimulus related metadata.
stimulus:
name: stimulus
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded stimulus for the
recording (rows).
range: TimeSeriesReferenceVectorData
@@ -593,13 +666,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular response related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular response related metadata.
response:
name: response
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded response for the
recording (rows)
range: TimeSeriesReferenceVectorData
@@ -631,7 +710,14 @@ classes:
name: description
description: Description of the contents of this table. Inherited from AlignedDynamicTable
and overwritten here to fix the value of the attribute.
+ ifabsent: string(A table to group together a stimulus and response from a
+ single electrode and a single simultaneous recording and for storing metadata
+ about the intracellular recording.)
range: text
+ required: true
+ equals_string: A table to group together a stimulus and response from a single
+ electrode and a single simultaneous recording and for storing metadata about
+ the intracellular recording.
electrodes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
@@ -677,6 +763,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the recordings column.
range: VectorIndex
required: true
@@ -700,6 +789,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
+ required: true
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
@@ -727,6 +817,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
@@ -759,6 +852,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
+ required: true
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
@@ -786,6 +880,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
@@ -809,6 +906,7 @@ classes:
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
+ required: true
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
@@ -833,6 +931,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
@@ -855,3 +956,4 @@ classes:
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
+ required: true
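Several tables in this file pin their `description` to a fixed sentence via `ifabsent` plus `equals_string`. A generator could express that with a `Literal` type or, as sketched here, a default plus a validator (an assumed encoding, not the verified output):

```python
from pydantic import BaseModel, field_validator

FIXED = "Table for storing intracellular electrode related metadata."

class IntracellularElectrodesTable(BaseModel):
    description: str = FIXED

    @field_validator("description")
    @classmethod
    def _fixed(cls, v: str) -> str:
        if v != FIXED:
            raise ValueError("description is fixed by the schema")
        return v
```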
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml
index 71e1c2a..ac28a30 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml
@@ -105,6 +105,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImageSeries__external_file:
name: ImageSeries__external_file
@@ -134,8 +145,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -153,6 +166,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -238,4 +262,15 @@ classes:
range: int32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
index 5910e70..97927d6 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -158,6 +160,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
@@ -170,6 +175,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -186,9 +202,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -263,6 +281,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -279,6 +300,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -300,6 +324,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -310,6 +337,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -390,6 +420,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
@@ -401,6 +434,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
@@ -426,3 +462,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
index 4786e5d..1add778 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
index b76d14a..4317684 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -158,6 +174,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -176,6 +195,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -196,6 +218,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -225,18 +258,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -256,24 +295,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -282,14 +329,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -319,8 +568,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -349,4 +598,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
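
The hunks above show the recurring treatment of HDMF `links`: each link becomes an explicit slot carrying a `source_type: link` annotation and an `any_of` of either the target class or `string`. As a rough illustration (class and field names assumed, not the project's actual generated output), such a slot would plausibly surface in a generated pydantic model as a union of the inlined target or a string reference:

```python
# Sketch only: a link slot modeled as "inlined object or string reference".
from typing import Union

from pydantic import BaseModel


class ImagingPlane(BaseModel):
    name: str


class TwoPhotonSeries(BaseModel):
    # any_of [ImagingPlane, string]: either the target object inlined here,
    # or a string path/reference to it elsewhere in the file
    imaging_plane: Union[ImagingPlane, str]
```
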
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
index 4a62dbb..f433f10 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
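
Two changes repeat through the retinotopy classes above: the auto-named `array` slot becomes a `value` slot, and the scalar attributes (`dimension`, `field_of_view`, `unit`, and so on) gain explicit `required`/`multivalued` flags. A minimal sketch of the resulting shape, assuming a hypothetical `AxisMap` model:

```python
# Sketch only: payload moves from `array` to `value`; attributes are flagged.
from typing import List

from pydantic import BaseModel, field_validator


class AxisMap(BaseModel):
    dimension: List[int]        # required: true, multivalued: true
    field_of_view: List[float]  # required: true, multivalued: true
    unit: str                   # required: true
    value: List[List[float]]    # the (num_rows, num_cols) payload, was `array`

    @field_validator("value")
    @classmethod
    def rectangular(cls, v: List[List[float]]) -> List[List[float]]:
        # two named dimensions imply a rectangular 2-D array
        assert all(len(row) == len(v[0]) for row in v), "ragged num_cols"
        return v
```
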
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml
index ae17645..e6679dc 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml
@@ -41,6 +41,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
@@ -48,12 +50,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: true
multivalued: false
@@ -73,12 +79,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -107,8 +115,12 @@ classes:
name: name
range: string
required: true
- image:
- name: image
+ value:
+ name: value
+ annotations:
+ source_type:
+ tag: source_type
+ value: reference
description: Ordered dataset of references to Image objects.
range: Image
required: true
@@ -148,13 +160,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -239,7 +255,9 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
@@ -248,19 +266,23 @@ classes:
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
+ required: true
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
@@ -273,8 +295,9 @@ classes:
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -311,10 +334,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -339,8 +366,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -364,6 +391,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
@@ -376,6 +404,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Ordered dataset of references to Image objects stored in the
parent group. Each Image object in the Images group should be stored once
and only once, so the dataset should have the same length as the number
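
The base schema now pairs `ifabsent` defaults with `equals_string` constraints for fixed-value attributes, such as the `unit` of a starting time being pinned to 'seconds'. Assuming the generator maps this pair to a `Literal` field with a matching default (a sketch, not confirmed generated output):

```python
# Sketch only: ifabsent + equals_string as a Literal field with a default.
from typing import Literal

from pydantic import BaseModel


class TimeSeriesStartingTime(BaseModel):
    rate: float                           # required: true
    unit: Literal["seconds"] = "seconds"  # ifabsent: string(seconds)
    value: float
```
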
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
index 226b5e1..e1d735b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml
@@ -62,9 +62,11 @@ classes:
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -101,8 +103,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -115,8 +117,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -129,8 +131,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -142,8 +144,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -155,8 +157,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -172,8 +174,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -185,8 +187,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
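
Every behavioral container above renames its catch-all `children` slot to `value`, keeping `multivalued: true` with `inlined: true` / `inlined_as_list: false`. That combination suggests a name-keyed mapping of child objects; a sketch under that assumption:

```python
# Sketch only: the renamed `value` slot as a name -> child-object mapping.
from typing import Dict

from pydantic import BaseModel


class SpatialSeries(BaseModel):
    name: str


class Position(BaseModel):
    # formerly `children`; holds the contained SpatialSeries keyed by name
    value: Dict[str, SpatialSeries]
```
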
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml
index 64b6e98..3f1acc9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
index 2172085..2efc5c7 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml
@@ -37,6 +37,7 @@ classes:
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
+ required: false
data:
name: data
description: Recorded voltage data.
@@ -62,6 +63,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -177,6 +181,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -221,6 +228,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -229,8 +247,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -252,8 +270,8 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -267,8 +285,8 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -288,18 +306,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -314,18 +345,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -374,6 +411,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
index 8c2e102..3764b00 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -67,6 +70,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
required: false
@@ -77,6 +83,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
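
The epoch changes are purely additive: each `named` slot now also records where it came from in the NWB source via a `source_type: neurodata_type_inc` annotation. Because annotations are plain data, they round-trip through any YAML loader; a quick check (file path, class and slot names assumed for illustration):

```python
# Sketch only: reading the new slot-level annotations back with PyYAML.
import yaml

with open("core.nwb.epoch.yaml") as f:
    schema = yaml.safe_load(f)

slot = schema["classes"]["TimeIntervals"]["attributes"]["tags_index"]
annotations = slot["annotations"]
print(annotations["named"]["value"])        # True: keep the user-given name
print(annotations["source_type"]["value"])  # 'neurodata_type_inc'
```
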
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml
index ab13eef..f468049 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.5.0)
range: text
+ required: true
+ equals_string: 2.5.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -374,7 +373,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -423,13 +422,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -448,8 +447,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -463,12 +462,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -486,11 +486,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -610,8 +610,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -694,6 +694,44 @@ classes:
range: ExperimentalConditionsTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
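
The `intervals` group on `NWBFile` previously collapsed into one multivalued slot with four identical `any_of: TimeIntervals` entries; it is now a proper `NWBFile__intervals` class with individually named sub-tables. A sketch of the resulting model (Python class names assumed):

```python
# Sketch only: NWBFile__intervals with optional, individually named tables.
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    name: str


class NWBFileIntervals(BaseModel):
    name: Literal["intervals"] = "intervals"  # ifabsent + equals_string
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    time_intervals: Optional[List[TimeIntervals]] = None  # multivalued: true
```
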
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
index dbde975..bdd9dd5 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -121,7 +135,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -142,7 +159,10 @@ classes:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
+ ifabsent: string(N/A)
range: text
+ required: true
+ equals_string: N/A
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
@@ -194,7 +214,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -275,7 +298,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -293,7 +319,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -311,7 +340,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -330,7 +362,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -349,7 +384,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -368,7 +406,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -387,7 +428,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -406,7 +450,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -443,7 +490,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -507,6 +557,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -541,6 +602,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
@@ -558,7 +622,10 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular electrode related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular electrode related metadata.
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
@@ -578,13 +645,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular stimulus related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular stimulus related metadata.
stimulus:
name: stimulus
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded stimulus for the
recording (rows).
range: TimeSeriesReferenceVectorData
@@ -603,13 +676,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular response related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular response related metadata.
response:
name: response
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded response for the
recording (rows)
range: TimeSeriesReferenceVectorData
@@ -641,7 +720,14 @@ classes:
name: description
description: Description of the contents of this table. Inherited from AlignedDynamicTable
and overwritten here to fix the value of the attribute.
+ ifabsent: string(A table to group together a stimulus and response from a
+ single electrode and a single simultaneous recording and for storing metadata
+ about the intracellular recording.)
range: text
+ required: true
+ equals_string: A table to group together a stimulus and response from a single
+ electrode and a single simultaneous recording and for storing metadata about
+ the intracellular recording.
electrodes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
@@ -687,6 +773,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the recordings column.
range: VectorIndex
required: true
@@ -710,6 +799,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
+ required: true
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
@@ -737,6 +827,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
@@ -769,6 +862,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
+ required: true
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
@@ -796,6 +890,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
@@ -819,6 +916,7 @@ classes:
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
+ required: true
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
@@ -843,6 +941,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
@@ -865,3 +966,4 @@ classes:
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
+ required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml
index 169e913..0f6efd9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml
@@ -105,6 +105,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImageSeries__external_file:
name: ImageSeries__external_file
@@ -134,8 +145,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -153,6 +166,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -240,4 +264,26 @@ classes:
range: uint32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
+ indexed_images:
+ name: indexed_images
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Images
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
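
Dropping the `repr: np.*` hints means the scalar types are no longer pinned to numpy dtypes and resolve purely through their LinkML `typeof` bases, while range constraints like `minimum_value` survive. A quick consistency check against the generated file (path assumed):

```python
# Sketch only: verifying the repr hints are gone but typeof/minimums remain.
import yaml

with open("core.nwb.language.yaml") as f:
    types = yaml.safe_load(f)["types"]

assert types["float32"]["typeof"] == "float"
assert "repr" not in types["float32"]         # no longer pinned to np.float32
assert types["uint32"]["minimum_value"] == 0  # range constraints are kept
```
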
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
index 18d7b69..f663994 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -158,6 +160,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
@@ -170,6 +175,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -186,9 +202,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -263,6 +281,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -279,6 +300,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -300,6 +324,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -310,6 +337,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -390,6 +420,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
@@ -401,6 +434,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
@@ -426,3 +462,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
index 25902d8..adadc3e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
index ebaa877..e0c051a 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml
@@ -29,12 +29,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -50,6 +52,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -81,6 +94,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -94,8 +110,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -109,8 +125,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -128,8 +144,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -158,6 +174,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -176,6 +195,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -196,6 +218,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -225,18 +258,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -256,24 +295,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -282,14 +329,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -319,8 +568,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -349,4 +598,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
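
Beyond the link and `children` → `value` changes repeated from earlier files, the compound-dtype columns (`PixelMask.x`, `VoxelMask.z`, and so on) are now tagged `array: exact_number_dimensions: 1`. A numpy-backed sketch of that constraint (model and validator names assumed):

```python
# Sketch only: enforcing exact_number_dimensions: 1 on per-row columns.
import numpy as np
from pydantic import BaseModel, ConfigDict, field_validator


class PixelMask(BaseModel):
    model_config = ConfigDict(arbitrary_types_allowed=True)
    x: np.ndarray
    y: np.ndarray
    weight: np.ndarray

    @field_validator("x", "y", "weight")
    @classmethod
    def one_dimensional(cls, v: np.ndarray) -> np.ndarray:
        assert v.ndim == 1, "exact_number_dimensions: 1"
        return v
```
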
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
index 23a305e..3a624b1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
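
The other recurring change in this file: the anonymous generated `array` slot becomes `value`, so every dataset class exposes its payload under one consistent slot name, and its sibling attributes gain explicit `required`/`multivalued` flags instead of relying on defaults. A condensed sketch of the resulting class shape (class and slot names illustrative, not verbatim):

```yaml
SomeAxisMap:
  name: SomeAxisMap
  attributes:
    unit:
      name: unit
      description: Unit that axis data is stored in (e.g., degrees).
      range: text
      required: true
    value:               # renamed from the old `array` slot
      name: value
      array:
        dimensions:
        - alias: num_rows
        - alias: num_cols
      range: float32
```
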
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
index df86c7a..e45d2cb 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml
@@ -41,6 +41,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
@@ -48,12 +50,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: true
multivalued: false
@@ -73,12 +79,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -107,8 +115,12 @@ classes:
name: name
range: string
required: true
- image:
- name: image
+ value:
+ name: value
+ annotations:
+ source_type:
+ tag: source_type
+ value: reference
description: Ordered dataset of references to Image objects.
range: Image
required: true
@@ -148,13 +160,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -239,7 +255,9 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
@@ -248,19 +266,23 @@ classes:
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified by unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
+ required: true
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
@@ -273,8 +295,9 @@ classes:
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -311,10 +334,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -339,8 +366,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -364,6 +391,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
@@ -376,6 +404,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Ordered dataset of references to Image objects stored in the
parent group. Each Image object in the Images group should be stored once
and only once, so the dataset should have the same length as the number
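
Fixed- and default-valued NWB attributes are encoded here with the LinkML `ifabsent`/`equals_string` pair: `ifabsent` supplies the default when the attribute is omitted, and `equals_string` pins attributes whose value the NWB spec fixes outright. The `unit` attribute in the hunk above is the canonical instance of the pattern:

```yaml
unit:
  name: unit
  description: Unit of measurement for time, which is fixed to 'seconds'.
  ifabsent: string(seconds)   # default applied when the attribute is absent
  range: text
  required: true
  equals_string: seconds      # only this exact value validates
```
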
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
index 839f43b..650a4cd 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml
@@ -62,9 +62,11 @@ classes:
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -101,8 +103,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -115,8 +117,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -129,8 +131,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -142,8 +144,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -155,8 +157,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -172,8 +174,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -185,8 +187,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
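
Group classes that exist only to hold a bag of typed children are all reshaped the same way in this file: the generated `children` slot is renamed to `value`, keeping the multivalued, name-keyed mapping semantics. A sketch of the resulting shape, using `Position` (whose NWB spec groups `SpatialSeries` objects):

```yaml
Position:
  name: Position
  is_a: NWBDataInterface
  attributes:
    value:                     # renamed from `children`
      name: value
      multivalued: true
      inlined: true
      inlined_as_list: false   # serialized as a name-keyed mapping, not a list
      any_of:
      - range: SpatialSeries
```
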
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml
index a01cb4b..4dd254b 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
index 7c95205..5dba82c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml
@@ -37,6 +37,7 @@ classes:
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
+ required: false
data:
name: data
description: Recorded voltage data.
@@ -62,6 +63,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -177,6 +181,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -221,6 +228,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -229,8 +247,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -252,8 +270,8 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -267,8 +285,8 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -288,18 +306,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -314,18 +345,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -374,6 +411,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
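
Fields of compound-dtype datasets, like the `x`/`y`/`z` components of `ElectrodeGroup.position`, are now given an explicit shape through the LinkML array spec rather than left unshaped:

```yaml
x:
  name: x
  description: x coordinate
  array:
    exact_number_dimensions: 1   # values form a 1-D array
  range: float32
  required: false
  multivalued: false
```
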
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
index 7e9c4fc..fb0df61 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -67,6 +70,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
required: false
@@ -77,6 +83,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
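
DynamicTable columns and index datasets pulled in via `neurodata_type_inc` now carry a second annotation recording that provenance alongside the existing `named` tag (which, as elsewhere in these schemas, appears to mark slots whose instances must be given an explicit name):

```yaml
tags_index:
  name: tags_index
  annotations:
    named:
      tag: named
      value: true
    source_type:
      tag: source_type
      value: neurodata_type_inc   # provenance: an NWB neurodata_type_inc spec
  description: Index for tags.
  range: VectorIndex
  required: false
```
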
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
index 150219c..f5d5d49 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.6.0)
range: text
+ required: true
+ equals_string: 2.6.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -374,7 +373,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -423,13 +422,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -448,8 +447,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -463,12 +462,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -486,11 +486,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -610,8 +610,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -694,6 +694,44 @@ classes:
range: ExperimentalConditionsTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
@@ -784,7 +822,9 @@ classes:
name: reference
description: Age is with reference to this event. Can be 'birth' or 'gestational'.
If reference is omitted, 'birth' is implied.
+ ifabsent: string(birth)
range: text
+ required: false
value:
name: value
range: text
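
Two refactors in this file deserve a note. First, the scoped classes for `/general` subgroups drop the redundant `NWBFile__` prefix (`general__source_script`, `general__extracellular_ephys`, and so on), since the double-underscore scoping already encodes the nesting. Second, `NWBFile.intervals` no longer uses a degenerate `any_of` listing `TimeIntervals` four times; it points at an explicit `NWBFile__intervals` class whose `epochs`, `trials`, and `invalid_times` slots name the well-known subgroups, with a multivalued `time_intervals` slot as the catch-all:

```yaml
intervals:
  name: intervals
  range: NWBFile__intervals   # one typed subgroup instead of four any_of branches
  required: false
  multivalued: false
```
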
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
index bb47c9e..b3181bc 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number, allows to group different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -121,7 +135,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -142,7 +159,10 @@ classes:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
+ ifabsent: string(N/A)
range: text
+ required: true
+ equals_string: N/A
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
@@ -194,7 +214,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -275,7 +298,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -293,7 +319,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -311,7 +340,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -330,7 +362,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -349,7 +384,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -368,7 +406,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -387,7 +428,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -406,7 +450,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -443,7 +490,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -507,6 +557,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -541,6 +602,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
@@ -558,7 +622,10 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular electrode related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular electrode related metadata.
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
@@ -578,13 +645,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular stimulus related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular stimulus related metadata.
stimulus:
name: stimulus
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded stimulus for the
recording (rows).
range: TimeSeriesReferenceVectorData
@@ -603,13 +676,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular response related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular response related metadata.
response:
name: response
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded response for the
recording (rows)
range: TimeSeriesReferenceVectorData
@@ -641,7 +720,14 @@ classes:
name: description
description: Description of the contents of this table. Inherited from AlignedDynamicTable
and overwritten here to fix the value of the attribute.
+ ifabsent: string(A table to group together a stimulus and response from a
+ single electrode and a single simultaneous recording and for storing metadata
+ about the intracellular recording.)
range: text
+ required: true
+ equals_string: A table to group together a stimulus and response from a single
+ electrode and a single simultaneous recording and for storing metadata about
+ the intracellular recording.
electrodes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
@@ -687,6 +773,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the recordings column.
range: VectorIndex
required: true
@@ -710,6 +799,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
+ required: true
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
@@ -737,6 +827,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
@@ -769,6 +862,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
+ required: true
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
@@ -796,6 +890,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
@@ -819,6 +916,7 @@ classes:
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
+ required: true
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
@@ -843,6 +941,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
@@ -865,3 +966,4 @@ classes:
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
+ required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
index 114ea51..45bd0a3 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml
@@ -105,6 +105,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImageSeries__external_file:
name: ImageSeries__external_file
@@ -134,8 +145,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -153,6 +166,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -240,4 +264,26 @@ classes:
range: uint32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
+ indexed_images:
+ name: indexed_images
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Images
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
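
The `repr` hints mapping scalar types onto numpy dtypes are dropped wholesale from the language schema, leaving plain LinkML type definitions. Before and after for one representative type:

```yaml
# before
float32:
  name: float32
  typeof: float
  repr: np.float32   # generator-specific numpy hint, now removed

# after
float32:
  name: float32
  typeof: float
```
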
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
index d47eba8..56f8824 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -158,6 +160,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
@@ -170,6 +175,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -186,9 +202,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -263,6 +281,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -279,6 +300,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -300,6 +324,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -310,6 +337,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -390,6 +420,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
@@ -401,6 +434,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
@@ -426,3 +462,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
index 9d01859..93ab4af 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
range: numeric
required: true
multivalued: false
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
index f1d6bc1..80a1f6c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml
@@ -29,28 +29,45 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
exposure_time:
name: exposure_time
description: Exposure time of the sample; often the inverse of the frequency.
range: float32
+ required: false
binning:
name: binning
description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
range: uint8
+ required: false
power:
name: power
description: Power of the excitation in mW, if known.
range: float32
+ required: false
intensity:
name: intensity
description: Intensity of the excitation in mW/mm^2, if known.
range: float32
+ required: false
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
TwoPhotonSeries:
name: TwoPhotonSeries
@@ -65,12 +82,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -86,6 +105,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -117,6 +147,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -130,8 +163,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -145,8 +178,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -164,8 +197,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -194,6 +227,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -212,6 +248,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -232,6 +271,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -261,18 +311,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -292,24 +348,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -318,14 +382,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+ more anterior, larger numbers in y means more rightward, and larger numbers
+ in z means more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -355,8 +621,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -385,4 +651,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
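
The largest structural change in this file: `ImagingPlane` stops being a generic container with a single `children` slot and gains explicit, individually typed slots for each dataset, attribute, and link in its NWB spec, plus dedicated subgroup classes (`ImagingPlane__manifold`, `ImagingPlane__origin_coords`, `ImagingPlane__grid_spacing`) for its shaped datasets. Condensed to the essentials, the new shape is:

```yaml
ImagingPlane:
  name: ImagingPlane
  is_a: NWBContainer
  attributes:
    excitation_lambda:
      name: excitation_lambda
      range: float32
      required: true
    optical_channel:           # previously the sole `children` member
      name: optical_channel
      range: OpticalChannel
      required: true
      multivalued: true
    device:
      name: device
      annotations:
        source_type:
          tag: source_type
          value: link
      required: true
      any_of:
      - range: Device
      - range: string
```
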
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
index 629bea8..dc790f3 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml
index b21d698..21a57b1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml
@@ -41,6 +41,8 @@ classes:
description: Start index into the TimeSeries 'data' and 'timestamp' datasets
of the referenced TimeSeries. The first dimension of those arrays is always
time.
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
@@ -48,12 +50,16 @@ classes:
name: count
description: Number of data samples available in this time series, during
this epoch
+ array:
+ exact_number_dimensions: 1
range: int32
required: true
multivalued: false
timeseries:
name: timeseries
description: The TimeSeries that this index applies to
+ array:
+ exact_number_dimensions: 1
range: TimeSeries
required: true
multivalued: false
@@ -73,12 +79,14 @@ classes:
name: resolution
description: Pixel resolution of the image, in pixels per centimeter.
range: float32
+ required: false
description:
name: description
description: Description of the image.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -107,8 +115,12 @@ classes:
name: name
range: string
required: true
- image:
- name: image
+ value:
+ name: value
+ annotations:
+ source_type:
+ tag: source_type
+ value: reference
description: Ordered dataset of references to Image objects.
range: Image
required: true
@@ -148,13 +160,17 @@ classes:
description:
name: description
description: Description of the time series.
+ ifabsent: string(no description)
range: text
+ required: false
comments:
name: comments
description: Human-readable comments about the TimeSeries. This second descriptive
field can be used to store additional information, or descriptive information
if the primary description field is populated with a computer-readable string.
+ ifabsent: string(no comments)
range: text
+ required: false
data:
name: data
description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first
@@ -239,7 +255,9 @@ classes:
to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion'
multiplier to get from raw data acquisition values to recorded volts is
2.5/32768/8000 = 9.5367e-9.
+ ifabsent: float(1.0)
range: float32
+ required: false
offset:
name: offset
description: Scalar to add to the data after scaling by 'conversion' to finalize
@@ -248,19 +266,23 @@ classes:
to re-center the data, and (b) specialized recording devices that naturally
cause a scalar offset with respect to the true units.
range: float32
+ required: false
resolution:
name: resolution
description: Smallest meaningful difference between values in data, stored
in the specified unit, e.g., the change in value of the least significant
bit, or a larger number if signal noise is known to be present. If unknown,
use -1.0.
+ ifabsent: float(-1.0)
range: float32
+ required: false
unit:
name: unit
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
+ required: true
continuity:
name: continuity
description: Optionally describe the continuity of the data. Can be "continuous",
@@ -273,8 +295,9 @@ classes:
the way this data is interpreted, the way it is visualized, and what analysis
methods are applicable.
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -311,10 +334,14 @@ classes:
name: rate
description: Sampling rate, in Hz.
range: float32
+ required: true
unit:
name: unit
description: Unit of measurement for time, which is fixed to 'seconds'.
+ ifabsent: string(seconds)
range: text
+ required: true
+ equals_string: seconds
value:
name: value
range: float64
@@ -339,8 +366,8 @@ classes:
description: A collection of processed data.
is_a: NWBContainer
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -364,6 +391,7 @@ classes:
name: description
description: Description of this collection of images.
range: text
+ required: true
image:
name: image
description: Images stored in this collection.
@@ -376,6 +404,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Ordered dataset of references to Image objects stored in the
parent group. Each Image object in the Images group should be stored once
and only once, so the dataset should have the same length as the number
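Two related idioms run through core.nwb.base.yaml: defaults from the NWB spec are now carried as LinkML `ifabsent` expressions (`float(1.0)` for `conversion`, `float(-1.0)` for `resolution`, `string(no comments)` for `comments`), and optionality is stated explicitly rather than implied. A condensed sketch of one such slot (description shortened here):

```yaml
resolution:
  name: resolution
  description: Smallest meaningful difference between values in data. If unknown,
    use -1.0.
  ifabsent: float(-1.0)
  range: float32
  required: false
```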
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
index 91d2841..0df664e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml
@@ -62,9 +62,11 @@ classes:
value is 'meters'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(meters)
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -101,8 +103,8 @@ classes:
events. BehavioralTimeSeries is for continuous data.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -115,8 +117,8 @@ classes:
for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -129,8 +131,8 @@ classes:
of BehavioralEpochs for more details.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -142,8 +144,8 @@ classes:
description: Eye-tracking data, representing pupil size.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -155,8 +157,8 @@ classes:
description: Eye-tracking data, representing direction of gaze.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -172,8 +174,8 @@ classes:
be radians or degrees.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -185,8 +187,8 @@ classes:
description: Position data, whether along the x, x/y or x/y/z axis.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
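Every behavioral container in this file swaps its `children` slot for `value`, keeping the inlining flags and `any_of` ranges untouched; only the slot under which contained objects are keyed changes. The renamed slot, with the ranges (which sit outside the hunks) elided:

```yaml
value:
  name: value
  multivalued: true
  inlined: true
  inlined_as_list: false
  # any_of ranges carried over unchanged from the old children slot
```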
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml
index 7719f95..ab2fc92 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml
@@ -28,8 +28,10 @@ classes:
description: Description of the device (e.g., model, firmware version, processing
software version, etc.) as free-form text.
range: text
+ required: false
manufacturer:
name: manufacturer
description: The name of the manufacturer of the device.
range: text
+ required: false
tree_root: true
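core.nwb.device.yaml is the smallest illustration of the explicit-optionality change: LinkML slots are already optional by default, so the added `required: false` is redundant for validation but makes the generated schema self-documenting for downstream consumers. The full attribute, as emitted:

```yaml
manufacturer:
  name: manufacturer
  description: The name of the manufacturer of the device.
  range: text
  required: false
```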
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
index f9525e7..54d0f4f 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml
@@ -37,6 +37,7 @@ classes:
at 300 Hz". If a non-standard filter type is used, provide as much detail
about the filter properties as possible.
range: text
+ required: false
data:
name: data
description: Recorded voltage data.
@@ -62,6 +63,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -177,6 +181,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the electrodes that this time series
was generated from.
range: DynamicTableRegion
@@ -221,6 +228,17 @@ classes:
range: float64
required: true
multivalued: false
+ source_electricalseries:
+ name: source_electricalseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ElectricalSeries
+ - range: string
tree_root: true
EventWaveform:
name: EventWaveform
@@ -229,8 +247,8 @@ classes:
during experiment acquisition.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -252,8 +270,8 @@ classes:
the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -267,8 +285,8 @@ classes:
properties should be noted in the ElectricalSeries 'filtering' attribute.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -288,18 +306,31 @@ classes:
name: description
description: Description of this electrode group.
range: text
+ required: true
location:
name: location
description: Location of electrode group. Specify the area, layer, comments
on estimation of area/layer, etc. Use standard atlas names for anatomical
regions when possible.
range: text
+ required: true
position:
name: position
description: stereotaxic or common framework coordinates
range: ElectrodeGroup__position
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ElectrodeGroup__position:
name: ElectrodeGroup__position
@@ -314,18 +345,24 @@ classes:
x:
name: x
description: x coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
y:
name: y
description: y coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
z:
name: z
description: z coordinate
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -374,6 +411,17 @@ classes:
range: float32
required: true
multivalued: false
+ clustering_interface:
+ name: clustering_interface
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Clustering
+ - range: string
tree_root: true
Clustering:
name: Clustering
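This file introduces the new rendering for NWB links: each link becomes an ordinary slot carrying a `source_type: link` annotation, with a union range because an HDF5 link may arrive either resolved to the target object or as a path string. The `device` link on ElectrodeGroup, as emitted above:

```yaml
device:
  name: device
  annotations:
    source_type:
      tag: source_type
      value: link
  required: true
  multivalued: false
  any_of:
  - range: Device
  - range: string
```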
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
index b4586cd..1885024 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml
@@ -57,6 +57,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for tags.
range: VectorIndex
required: false
@@ -67,6 +70,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: An index into a TimeSeries object.
range: TimeSeriesReferenceVectorData
required: false
@@ -77,6 +83,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for timeseries.
range: VectorIndex
required: false
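The epoch hunks add a second annotation alongside the existing `named` tag, recording that these slots originate from a `neurodata_type_inc` in the NWB source. A sketch of one full slot; the name `tags_index` is reconstructed from the TimeIntervals spec, since it sits just above the hunk:

```yaml
tags_index:
  name: tags_index
  annotations:
    named:
      tag: named
      value: true
    source_type:
      tag: source_type
      value: neurodata_type_inc
  description: Index for tags.
  range: VectorIndex
  required: false
```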
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml
index b846a65..1b56d9d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml
@@ -34,6 +34,7 @@ classes:
name: notes
description: Any notes the user has about the dataset being stored
range: text
+ required: true
tree_root: true
NWBFile:
name: NWBFile
@@ -51,7 +52,10 @@ classes:
name: nwb_version
description: File version string. Use semantic versioning, e.g. 1.2.1. This
will be the name of the format with trailing major, minor and patch numbers.
+ ifabsent: string(2.7.0)
range: text
+ required: true
+ equals_string: 2.7.0
file_create_date:
name: file_create_date
description: 'A record of the date the file was created and of subsequent
@@ -206,14 +210,9 @@ classes:
having a particular scientific goal, trials (see trials subgroup) during
an experiment, or epochs (see epochs subgroup) deriving from analysis of
data.
- multivalued: true
- inlined: true
- inlined_as_list: false
- any_of:
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
- - range: TimeIntervals
+ range: NWBFile__intervals
+ required: false
+ multivalued: false
units:
name: units
description: Data about sorted spike units.
@@ -376,7 +375,7 @@ classes:
name: source_script
description: Script file or link to public source code used to create this
NWB file.
- range: NWBFile__general__source_script
+ range: general__source_script
required: false
multivalued: false
stimulus:
@@ -425,13 +424,13 @@ classes:
extracellular_ephys:
name: extracellular_ephys
description: Metadata related to extracellular electrophysiology.
- range: NWBFile__general__extracellular_ephys
+ range: general__extracellular_ephys
required: false
multivalued: false
intracellular_ephys:
name: intracellular_ephys
description: Metadata related to intracellular electrophysiology.
- range: NWBFile__general__intracellular_ephys
+ range: general__intracellular_ephys
required: false
multivalued: false
optogenetics:
@@ -450,8 +449,8 @@ classes:
inlined_as_list: false
any_of:
- range: ImagingPlane
- NWBFile__general__source_script:
- name: NWBFile__general__source_script
+ general__source_script:
+ name: general__source_script
description: Script file or link to public source code used to create this NWB
file.
attributes:
@@ -465,12 +464,13 @@ classes:
name: file_name
description: Name of script file.
range: text
+ required: true
value:
name: value
range: text
required: true
- NWBFile__general__extracellular_ephys:
- name: NWBFile__general__extracellular_ephys
+ general__extracellular_ephys:
+ name: general__extracellular_ephys
description: Metadata related to extracellular electrophysiology.
attributes:
name:
@@ -488,11 +488,11 @@ classes:
electrodes:
name: electrodes
description: A table of all electrodes (i.e. channels) used for recording.
- range: NWBFile__general__extracellular_ephys__electrodes
+ range: extracellular_ephys__electrodes
required: false
multivalued: false
- NWBFile__general__extracellular_ephys__electrodes:
- name: NWBFile__general__extracellular_ephys__electrodes
+ extracellular_ephys__electrodes:
+ name: extracellular_ephys__electrodes
description: A table of all electrodes (i.e. channels) used for recording.
is_a: DynamicTable
attributes:
@@ -612,8 +612,8 @@ classes:
range: text
required: false
multivalued: false
- NWBFile__general__intracellular_ephys:
- name: NWBFile__general__intracellular_ephys
+ general__intracellular_ephys:
+ name: general__intracellular_ephys
description: Metadata related to intracellular electrophysiology.
attributes:
name:
@@ -696,6 +696,44 @@ classes:
range: ExperimentalConditionsTable
required: false
multivalued: false
+ NWBFile__intervals:
+ name: NWBFile__intervals
+ description: Experimental intervals, whether that be logically distinct sub-experiments
+ having a particular scientific goal, trials (see trials subgroup) during an
+ experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(intervals)
+ range: string
+ required: true
+ equals_string: intervals
+ epochs:
+ name: epochs
+ description: Divisions in time marking experimental stages or sub-divisions
+ of a single recording session.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ trials:
+ name: trials
+ description: Repeated experimental events that have a logical grouping.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ invalid_times:
+ name: invalid_times
+ description: Time intervals that should be removed from analysis.
+ range: TimeIntervals
+ required: false
+ multivalued: false
+ time_intervals:
+ name: time_intervals
+ description: Optional additional table(s) for describing other experimental
+ time intervals.
+ range: TimeIntervals
+ required: false
+ multivalued: true
LabMetaData:
name: LabMetaData
description: Lab-specific meta-data.
@@ -786,7 +824,9 @@ classes:
name: reference
description: Age is with reference to this event. Can be 'birth' or 'gestational'.
If reference is omitted, 'birth' is implied.
+ ifabsent: string(birth)
range: text
+ required: false
value:
name: value
range: text
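Two structural fixes land in core.nwb.file.yaml. First, the scoped class names drop the redundant `NWBFile__` prefix (`NWBFile__general__source_script` becomes `general__source_script`, and likewise for the ephys metadata classes). Second, the degenerate `intervals` slot, which previously offered four identical `range: TimeIntervals` alternatives, is replaced by a dedicated `NWBFile__intervals` class whose group name is pinned with the usual name-fixing idiom:

```yaml
name:
  name: name
  ifabsent: string(intervals)
  range: string
  required: true
  equals_string: intervals
```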
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
index fffe23a..710ba36 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml
@@ -29,10 +29,12 @@ classes:
name: stimulus_description
description: Protocol/stimulus name for this patch-clamp dataset.
range: text
+ required: true
sweep_number:
name: sweep_number
description: Sweep number; allows grouping of different PatchClampSeries together.
range: uint32
+ required: false
data:
name: data
description: Recorded voltage or current.
@@ -46,6 +48,17 @@ classes:
range: float32
required: false
multivalued: false
+ electrode:
+ name: electrode
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: IntracellularElectrode
+ - range: string
tree_root: true
PatchClampSeries__data:
name: PatchClampSeries__data
@@ -63,8 +76,9 @@ classes:
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion' and add 'offset'.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -121,7 +135,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -142,7 +159,10 @@ classes:
name: stimulus_description
description: An IZeroClampSeries has no stimulus, so this attribute is automatically
set to "N/A"
+ ifabsent: string(N/A)
range: text
+ required: true
+ equals_string: N/A
bias_current:
name: bias_current
description: Bias current, in amps, fixed to 0.0.
@@ -194,7 +214,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -275,7 +298,10 @@ classes:
fixed to 'amperes'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(amperes)
range: text
+ required: true
+ equals_string: amperes
value:
name: value
range: AnyType
@@ -293,7 +319,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -311,7 +340,10 @@ classes:
unit:
name: unit
description: Unit of measurement for capacitance_slow, which is fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -330,7 +362,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_bandwidth, which is fixed
to 'hertz'.
+ ifabsent: string(hertz)
range: text
+ required: true
+ equals_string: hertz
value:
name: value
range: float32
@@ -349,7 +384,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_correction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -368,7 +406,10 @@ classes:
name: unit
description: Unit of measurement for resistance_comp_prediction, which is
fixed to 'percent'.
+ ifabsent: string(percent)
range: text
+ required: true
+ equals_string: percent
value:
name: value
range: float32
@@ -387,7 +428,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_capacitance_comp, which is
fixed to 'farads'.
+ ifabsent: string(farads)
range: text
+ required: true
+ equals_string: farads
value:
name: value
range: float32
@@ -406,7 +450,10 @@ classes:
name: unit
description: Unit of measurement for whole_cell_series_resistance_comp, which
is fixed to 'ohms'.
+ ifabsent: string(ohms)
range: text
+ required: true
+ equals_string: ohms
value:
name: value
range: float32
@@ -443,7 +490,10 @@ classes:
fixed to 'volts'. Actual stored values are not necessarily stored in these
units. To access the data in these units, multiply 'data' by 'conversion'
and add 'offset'.
+ ifabsent: string(volts)
range: text
+ required: true
+ equals_string: volts
value:
name: value
range: AnyType
@@ -507,6 +557,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
SweepTable:
name: SweepTable
@@ -541,6 +602,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index for series.
range: VectorIndex
required: true
@@ -558,7 +622,10 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular electrode related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular electrode related metadata.
electrode:
name: electrode
description: Column for storing the reference to the intracellular electrode.
@@ -578,13 +645,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular stimulus related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular stimulus related metadata.
stimulus:
name: stimulus
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded stimulus for the
recording (rows).
range: TimeSeriesReferenceVectorData
@@ -596,6 +669,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the stimulus template for the
recording (rows).
range: TimeSeriesReferenceVectorData
@@ -614,13 +690,19 @@ classes:
description:
name: description
description: Description of what is in this dynamic table.
+ ifabsent: string(Table for storing intracellular response related metadata.)
range: text
+ required: true
+ equals_string: Table for storing intracellular response related metadata.
response:
name: response
annotations:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Column storing the reference to the recorded response for the
recording (rows)
range: TimeSeriesReferenceVectorData
@@ -652,7 +734,14 @@ classes:
name: description
description: Description of the contents of this table. Inherited from AlignedDynamicTable
and overwritten here to fix the value of the attribute.
+ ifabsent: string(A table to group together a stimulus and response from a
+ single electrode and a single simultaneous recording and for storing metadata
+ about the intracellular recording.)
range: text
+ required: true
+ equals_string: A table to group together a stimulus and response from a single
+ electrode and a single simultaneous recording and for storing metadata about
+ the intracellular recording.
electrodes:
name: electrodes
description: Table for storing intracellular electrode related metadata.
@@ -698,6 +787,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the recordings column.
range: VectorIndex
required: true
@@ -721,6 +813,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: IntracellularRecordingsTable
+ required: true
SequentialRecordingsTable:
name: SequentialRecordingsTable
description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable
@@ -748,6 +841,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the simultaneous_recordings column.
range: VectorIndex
required: true
@@ -780,6 +876,7 @@ classes:
table region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SimultaneousRecordingsTable
+ required: true
RepetitionsTable:
name: RepetitionsTable
description: A table for grouping different sequential intracellular recordings
@@ -807,6 +904,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the sequential_recordings column.
range: VectorIndex
required: true
@@ -830,6 +930,7 @@ classes:
region applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: SequentialRecordingsTable
+ required: true
ExperimentalConditionsTable:
name: ExperimentalConditionsTable
description: A table for grouping different intracellular recording repetitions
@@ -854,6 +955,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index dataset for the repetitions column.
range: VectorIndex
required: true
@@ -876,3 +980,4 @@ classes:
applies to. This specializes the attribute inherited from DynamicTableRegion
to fix the type of table that can be referenced here.
range: RepetitionsTable
+ required: true
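Attributes whose value the NWB spec fixes outright are pinned with the paired `ifabsent`/`equals_string` idiom: `equals_string` constrains what a file may contain, while `ifabsent` supplies the value when it is omitted. From the capacitance hunks above:

```yaml
unit:
  name: unit
  description: Unit of measurement for capacitance_fast, which is fixed to 'farads'.
  ifabsent: string(farads)
  range: text
  required: true
  equals_string: farads
```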
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml
index 645839e..cac5d73 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml
@@ -105,6 +105,17 @@ classes:
range: text
required: false
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
ImageSeries__external_file:
name: ImageSeries__external_file
@@ -134,8 +145,10 @@ classes:
(and so there is a single element in the 'external_file' dataset), then
this attribute should have value [0].
range: int32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_files
@@ -153,6 +166,17 @@ classes:
name: name
range: string
required: true
+ masked_imageseries:
+ name: masked_imageseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
OpticalSeries:
name: OpticalSeries
@@ -240,4 +264,26 @@ classes:
range: uint32
required: true
multivalued: false
+ indexed_timeseries:
+ name: indexed_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
+ indexed_images:
+ name: indexed_images
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: Images
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
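Every copy of nwb.language.yaml drops the `repr: np.*` hints, leaving the scalar types defined purely in LinkML terms; the numpy mapping presumably moves into the generator rather than living in the schema. A resulting type definition looks like:

```yaml
types:
  uint32:
    name: uint32
    typeof: integer
    minimum_value: 0
```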
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
index 0b95ca2..9395fd9 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml
@@ -72,9 +72,11 @@ classes:
description: Since there can be different units for different features, store
the units in 'feature_units'. The default value for this attribute is "see
'feature_units'".
+ ifabsent: string(see 'feature_units')
range: text
- array:
- name: array
+ required: false
+ value:
+ name: value
range: numeric
any_of:
- array:
@@ -158,6 +160,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion pointer to the channels that this decomposition
series was generated from.
range: DynamicTableRegion
@@ -170,6 +175,17 @@ classes:
range: DecompositionSeries__bands
required: true
multivalued: false
+ source_timeseries:
+ name: source_timeseries
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: false
+ multivalued: false
+ any_of:
+ - range: TimeSeries
+ - range: string
tree_root: true
DecompositionSeries__data:
name: DecompositionSeries__data
@@ -186,9 +202,11 @@ classes:
description: Base unit of measurement for working with the data. Actual stored
values are not necessarily stored in these units. To access the data in
these units, multiply 'data' by 'conversion'.
+ ifabsent: string(no unit)
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_times
@@ -263,6 +281,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the spike_times dataset.
range: VectorIndex
required: false
@@ -279,6 +300,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the obs_intervals dataset.
range: VectorIndex
required: false
@@ -300,6 +324,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into electrodes.
range: VectorIndex
required: false
@@ -310,6 +337,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Electrode that each spike unit came from, specified using a DynamicTableRegion.
range: DynamicTableRegion
required: false
@@ -390,6 +420,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms dataset. One value for every spike event.
See 'waveforms' for more detail.
range: VectorIndex
@@ -401,6 +434,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into the waveforms_index dataset. One value for every unit
(row in the table). See 'waveforms' for more detail.
range: VectorIndex
@@ -426,3 +462,4 @@ classes:
if the acquisition time series was smoothed/interpolated and it is possible
for the spike time to be between samples.
range: float64
+ required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
index 5c6344e..085004d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml
@@ -39,6 +39,17 @@ classes:
dimensions:
- alias: num_times
- alias: num_rois
+ site:
+ name: site
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: OptogeneticStimulusSite
+ - range: string
tree_root: true
OptogeneticStimulusSite:
name: OptogeneticStimulusSite
@@ -69,4 +80,15 @@ classes:
range: text
required: true
multivalued: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: Device
+ - range: string
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
index b3004c8..053698d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml
@@ -29,28 +29,45 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
exposure_time:
name: exposure_time
description: Exposure time of the sample; often the inverse of the frequency.
range: float32
+ required: false
binning:
name: binning
description: Number of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.
range: uint8
+ required: false
power:
name: power
description: Power of the excitation in mW, if known.
range: float32
+ required: false
intensity:
name: intensity
description: Intensity of the excitation in mW/mm^2, if known.
range: float32
+ required: false
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
TwoPhotonSeries:
name: TwoPhotonSeries
@@ -65,12 +82,14 @@ classes:
name: pmt_gain
description: Photomultiplier gain.
range: float32
+ required: false
scan_line_rate:
name: scan_line_rate
description: Lines imaged per second. This is also stored in /general/optophysiology
but is kept here as it is useful information for analysis, and so good to
be stored w/ the actual data.
range: float32
+ required: false
field_of_view:
name: field_of_view
description: Width, height and depth of image, or imaged area, in meters.
@@ -86,6 +105,17 @@ classes:
dimensions:
- alias: width_height_depth
exact_cardinality: 3
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
RoiResponseSeries:
name: RoiResponseSeries
@@ -117,6 +147,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: DynamicTableRegion referencing into an ROITable containing information
on the ROIs stored in this timeseries.
range: DynamicTableRegion
@@ -130,8 +163,8 @@ classes:
for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -145,8 +178,8 @@ classes:
for ROIs and for image planes).
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -164,8 +197,8 @@ classes:
is required and ROI names should remain consistent between them.
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -194,6 +227,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into pixel_mask.
range: VectorIndex
required: false
@@ -212,6 +248,9 @@ classes:
named:
tag: named
value: true
+ source_type:
+ tag: source_type
+ value: neurodata_type_inc
description: Index into voxel_mask.
range: VectorIndex
required: false
@@ -232,6 +271,17 @@ classes:
inlined_as_list: false
any_of:
- range: ImageSeries
+ imaging_plane:
+ name: imaging_plane
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImagingPlane
+ - range: string
tree_root: true
PlaneSegmentation__image_mask:
name: PlaneSegmentation__image_mask
@@ -261,18 +311,24 @@ classes:
x:
name: x
description: Pixel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Pixel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the pixel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -292,24 +348,32 @@ classes:
x:
name: x
description: Voxel x-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
y:
name: y
description: Voxel y-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
z:
name: z
description: Voxel z-coordinate.
+ array:
+ exact_number_dimensions: 1
range: uint32
required: false
multivalued: false
weight:
name: weight
description: Weight of the voxel.
+ array:
+ exact_number_dimensions: 1
range: float32
required: false
multivalued: false
@@ -318,14 +382,216 @@ classes:
description: An imaging plane and its metadata.
is_a: NWBContainer
attributes:
- children:
- name: children
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of the imaging plane.
+ range: text
+ required: false
+ multivalued: false
+ excitation_lambda:
+ name: excitation_lambda
+ description: Excitation wavelength, in nm.
+ range: float32
+ required: true
+ multivalued: false
+ imaging_rate:
+ name: imaging_rate
+ description: Rate that images are acquired, in Hz. If the corresponding TimeSeries
+ is present, the rate should be stored there instead.
+ range: float32
+ required: false
+ multivalued: false
+ indicator:
+ name: indicator
+ description: Calcium indicator.
+ range: text
+ required: true
+ multivalued: false
+ location:
+ name: location
+ description: Location of the imaging plane. Specify the area, layer, comments
+ on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+ standard atlas names for anatomical regions when possible.
+ range: text
+ required: true
+ multivalued: false
+ manifold:
+ name: manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents
+ the position of the pixel relative to the defined coordinate space. Deprecated
+ in favor of origin_coords and grid_spacing.
+ range: ImagingPlane__manifold
+ required: false
+ multivalued: false
+ origin_coords:
+ name: origin_coords
+ description: Physical location of the first element of the imaging plane (0,
+ 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+ what the physical location is relative to (e.g., bregma).
+ range: ImagingPlane__origin_coords
+ required: false
+ multivalued: false
+ grid_spacing:
+ name: grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also
+ reference_frame to interpret the grid.
+ range: ImagingPlane__grid_spacing
+ required: false
+ multivalued: false
+ reference_frame:
+ name: reference_frame
+ description: Describes reference frame of origin_coords and grid_spacing.
+ For example, this can be a text description of the anatomical location and
+ orientation of the grid defined by origin_coords and grid_spacing or the
+ vectors needed to transform or rotate the grid to a common anatomical axis
+ (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+ grid_spacing. If origin_coords and grid_spacing are not present, then this
+ field is not required. For example, if the microscope takes 10 x 10 x 2
+ images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+ to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+ 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x mean
+ more anterior, larger numbers in y mean more rightward, and larger numbers
+ in z mean more ventral, then enter the following -- origin_coords = (-1.2,
+ -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+ are relative to bregma. First dimension corresponds to anterior-posterior
+ axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+ axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+ axis (larger index = more ventral)."
+ range: text
+ required: false
+ multivalued: false
+ optical_channel:
+ name: optical_channel
+ description: An optical channel used to record from an imaging plane.
+ range: OpticalChannel
+ required: true
multivalued: true
- inlined: true
- inlined_as_list: false
+ device:
+ name: device
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
any_of:
- - range: OpticalChannel
+ - range: Device
+ - range: string
tree_root: true
+ ImagingPlane__manifold:
+ name: ImagingPlane__manifold
+ description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+ position of the pixel relative to the defined coordinate space. Deprecated in
+ favor of origin_coords and grid_spacing.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(manifold)
+ range: string
+ required: true
+ equals_string: manifold
+ conversion:
+ name: conversion
+ description: Scalar to multiply each element in data to convert it to the
+ specified 'unit'. If the data are stored in acquisition system units or
+ other units that require a conversion to be interpretable, multiply the
+ data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+ the data acquisition system stores values in this object as pixels from
+ x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+ the 'conversion' multiplier to get from raw data acquisition pixel units
+ to meters is 2/1000.
+ ifabsent: float(1.0)
+ range: float32
+ required: false
+ unit:
+ name: unit
+ description: Base unit of measurement for working with the data. The default
+ value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: false
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: x_y_z
+ exact_cardinality: 3
+ - array:
+ dimensions:
+ - alias: height
+ - alias: width
+ - alias: depth
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__origin_coords:
+ name: ImagingPlane__origin_coords
+ description: Physical location of the first element of the imaging plane (0, 0)
+ for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+ physical location is relative to (e.g., bregma).
+ attributes:
+ name:
+ name: name
+ ifabsent: string(origin_coords)
+ range: string
+ required: true
+ equals_string: origin_coords
+ unit:
+ name: unit
+ description: Measurement units for origin_coords. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
+ ImagingPlane__grid_spacing:
+ name: ImagingPlane__grid_spacing
+ description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+ in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+ to interpret the grid.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(grid_spacing)
+ range: string
+ required: true
+ equals_string: grid_spacing
+ unit:
+ name: unit
+ description: Measurement units for grid_spacing. The default value is 'meters'.
+ ifabsent: string(meters)
+ range: text
+ required: true
+ value:
+ name: value
+ range: float32
+ any_of:
+ - array:
+ dimensions:
+ - alias: x_y
+ exact_cardinality: 2
+ - array:
+ dimensions:
+ - alias: x_y_z
+ exact_cardinality: 3
OpticalChannel:
name: OpticalChannel
description: An optical channel used to record from an imaging plane.
@@ -355,8 +621,8 @@ classes:
frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
is_a: NWBDataInterface
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
@@ -385,4 +651,15 @@ classes:
range: TimeSeries
required: true
multivalued: false
+ original:
+ name: original
+ annotations:
+ source_type:
+ tag: source_type
+ value: link
+ required: true
+ multivalued: false
+ any_of:
+ - range: ImageSeries
+ - range: string
tree_root: true
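Scalar fields of compound types (the per-row `x`/`y`/`z`/`weight` of PixelMask and VoxelMask here, and `start`/`count` back in the base file) now declare an explicit one-dimensional shape instead of leaving the array form implicit:

```yaml
x:
  name: x
  description: Pixel x-coordinate.
  array:
    exact_number_dimensions: 1
  range: uint32
  required: false
  multivalued: false
```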
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
index 4bcc17a..6416821 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml
@@ -106,16 +106,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -137,16 +142,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -167,16 +177,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -198,16 +213,21 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
unit:
name: unit
description: Unit that axis data is stored in (e.g., degrees).
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -229,25 +249,32 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value.
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
focal_depth:
name: focal_depth
description: Focal depth offset, in meters.
range: float32
+ required: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -269,12 +296,16 @@ classes:
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
- array:
- name: array
+ required: true
+ multivalued: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -296,21 +327,27 @@ classes:
description: Number of bits used to represent each value. This is necessary
to determine maximum (white) pixel value
range: int32
+ required: true
dimension:
name: dimension
description: 'Number of rows and columns in the image. NOTE: row, column representation
is equivalent to height, width.'
range: int32
+ required: true
+ multivalued: true
field_of_view:
name: field_of_view
description: Size of viewing area, in meters.
range: float32
+ required: true
+ multivalued: true
format:
name: format
description: Format of image. Right now only 'raw' is supported.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml
index 68f0304..50aeafe 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
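The hdmf-common copies of nwb.language.yaml get one fix beyond the `repr` removal: their namespace annotation previously claimed `core` and now correctly reads:

```yaml
annotations:
  namespace:
    tag: namespace
    value: hdmf-common
```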
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml
index 6c1dbe1..8220620 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml
@@ -23,7 +23,13 @@ classes:
shape:
name: shape
description: the shape of this sparse matrix
+ array:
+ dimensions:
+ - alias: 'null'
+ exact_cardinality: 2
range: int
+ required: true
+ multivalued: false
indices:
name: indices
description: column indices
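SparseMatrix.shape gains an explicit fixed-length array. The `'null'` alias appears to be the generator's placeholder for a dimension the source spec leaves unnamed; the shape itself is pinned to exactly two entries (rows, columns):

```yaml
shape:
  name: shape
  description: the shape of this sparse matrix
  array:
    dimensions:
    - alias: 'null'
      exact_cardinality: 2
  range: int
  required: true
  multivalued: false
```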
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml
index f7a2a0a..27a272c 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml
@@ -34,6 +34,7 @@ classes:
name: target
description: Target dataset that this index applies to.
range: Data
+ required: true
tree_root: true
VectorData:
name: VectorData
@@ -55,6 +56,7 @@ classes:
name: description
description: Description of what these vectors represent.
range: text
+ required: true
tree_root: true
VectorIndex:
name: VectorIndex
@@ -71,6 +73,7 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
+ required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
@@ -106,10 +109,12 @@ classes:
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
+ required: true
description:
name: description
description: Description of what this table region points to.
range: text
+ required: true
tree_root: true
Container:
name: Container
@@ -155,10 +160,13 @@ classes:
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
+ required: true
+ multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
+ required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml
index 68f0304..50aeafe 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml
index 5c825a8..42a1170 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml
@@ -23,7 +23,13 @@ classes:
shape:
name: shape
description: the shape of this sparse matrix
+ array:
+ dimensions:
+ - alias: 'null'
+ exact_cardinality: 2
range: int
+ required: true
+ multivalued: false
indices:
name: indices
description: column indices
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml
index dec9fc7..fe82d7d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml
@@ -34,6 +34,7 @@ classes:
name: target
description: Target dataset that this index applies to.
range: Data
+ required: true
tree_root: true
VectorData:
name: VectorData
@@ -55,6 +56,7 @@ classes:
name: description
description: Description of what these vectors represent.
range: text
+ required: true
tree_root: true
VectorIndex:
name: VectorIndex
@@ -71,6 +73,7 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
+ required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
@@ -106,10 +109,12 @@ classes:
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
+ required: true
description:
name: description
description: Description of what this table region points to.
range: text
+ required: true
tree_root: true
Container:
name: Container
@@ -155,10 +160,13 @@ classes:
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
+ required: true
+ multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
+ required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml
index 68f0304..50aeafe 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-common
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml
index a7b2bb8..bdb5f39 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml
@@ -23,7 +23,13 @@ classes:
shape:
name: shape
description: the shape of this sparse matrix
+ array:
+ dimensions:
+ - alias: 'null'
+ exact_cardinality: 2
range: int
+ required: true
+ multivalued: false
indices:
name: indices
description: column indices
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml
index 48a60ac..4285b03 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml
@@ -34,6 +34,7 @@ classes:
name: target
description: Target dataset that this index applies to.
range: Data
+ required: true
tree_root: true
VectorData:
name: VectorData
@@ -55,8 +56,9 @@ classes:
name: description
description: Description of what these vectors represent.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -93,8 +95,9 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
- array:
- name: array
+ required: true
+ value:
+ name: value
array:
dimensions:
- alias: num_rows
@@ -134,10 +137,12 @@ classes:
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
+ required: true
description:
name: description
description: Description of what this table region points to.
range: text
+ required: true
tree_root: true
Container:
name: Container
@@ -183,10 +188,13 @@ classes:
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
+ required: true
+ multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
+ required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
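
The VectorData/VectorIndex pair renamed to `value` above implements the ragged-array convention spelled out in the VectorData docs: row i of the table spans VectorData[VectorIndex[i-1]:VectorIndex[i]], with VectorIndex holding the end offset of each row. A worked example with toy data:

```python
# Ragged rows recovered from a flat VectorData using a VectorIndex.
data = ["a", "b", "c", "d", "e"]   # VectorData contents
index = [2, 3, 5]                  # VectorIndex: end offset per row

rows = [data[(index[i - 1] if i else 0):index[i]] for i in range(len(index))]
assert rows == [["a", "b"], ["c"], ["d", "e"]]
```
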
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml
new file mode 100644
index 0000000..ff30beb
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml
@@ -0,0 +1,33 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.2.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..50aeafe
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..c32033d
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml
@@ -0,0 +1,81 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.2.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: a compressed sparse row matrix
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: the shape of this sparse matrix
+ array:
+ dimensions:
+ - alias: 'null'
+ exact_cardinality: 2
+ range: int
+ required: true
+ multivalued: false
+ indices:
+ name: indices
+ description: column indices
+ range: CSRMatrix__indices
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: index pointer
+ range: CSRMatrix__indptr
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: values in the matrix
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__indices:
+ name: CSRMatrix__indices
+ description: column indices
+ attributes:
+ name:
+ name: name
+ ifabsent: string(indices)
+ range: string
+ required: true
+ equals_string: indices
+ CSRMatrix__indptr:
+ name: CSRMatrix__indptr
+ description: index pointer
+ attributes:
+ name:
+ name: name
+ ifabsent: string(indptr)
+ range: string
+ required: true
+ equals_string: indptr
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: values in the matrix
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml
new file mode 100644
index 0000000..7746e8e
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml
@@ -0,0 +1,193 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.2.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ VocabData:
+ name: VocabData
+ description: Data that come from a controlled vocabulary of text values. A data
+ value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ vocabulary:
+ name: vocabulary
+ description: The available items in the controlled vocabulary.
+ array:
+ dimensions:
+ - alias: 'null'
+ range: text
+ required: true
+ multivalued: false
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+    dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
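
The DynamicTableRegion class defined in this file is, per its description, a column of 0-indexed row numbers selecting rows from a target DynamicTable. A toy illustration of that selection; the names here are made up for demonstration:

```python
# DynamicTableRegion data indexes rows of the referenced table.
electrode_labels = ["e0", "e1", "e2", "e3"]  # rows of the target DynamicTable
region = [0, 2]                              # DynamicTableRegion data
assert [electrode_labels[i] for i in region] == ["e0", "e2"]
```
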
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml
new file mode 100644
index 0000000..7befc87
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.2.0
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml
new file mode 100644
index 0000000..9ef70fc
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.2.1
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+    description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
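
SimpleMultiContainer's `value` slot is declared `inlined: true` with `inlined_as_list: false`, which in LinkML means contained Containers serialize as a mapping keyed by identifier rather than a positional list. A sketch of the assumed rendering, keyed by each child's `name`:

```python
# Assumed serialized form of SimpleMultiContainer.value: name -> child.
value = {
    "timeseries_1": {"name": "timeseries_1"},
    "timeseries_2": {"name": "timeseries_2"},
}
assert all(key == child["name"] for key, child in value.items())
```
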
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..50aeafe
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..3168d8a
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml
@@ -0,0 +1,83 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.2.1
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: a compressed sparse row matrix
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: the shape of this sparse matrix
+ array:
+ dimensions:
+ - alias: 'null'
+ exact_cardinality: 2
+ range: int
+ required: true
+ multivalued: false
+ indices:
+ name: indices
+ description: column indices
+ range: CSRMatrix__indices
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: index pointer
+ range: CSRMatrix__indptr
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: values in the matrix
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__indices:
+ name: CSRMatrix__indices
+ description: column indices
+ attributes:
+ name:
+ name: name
+ ifabsent: string(indices)
+ range: string
+ required: true
+ equals_string: indices
+ CSRMatrix__indptr:
+ name: CSRMatrix__indptr
+ description: index pointer
+ attributes:
+ name:
+ name: name
+ ifabsent: string(indptr)
+ range: string
+ required: true
+ equals_string: indptr
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: values in the matrix
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml
new file mode 100644
index 0000000..2ce11ab
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml
@@ -0,0 +1,193 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.2.1
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ VocabData:
+ name: VocabData
+ description: Data that come from a controlled vocabulary of text values. A data
+ value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ vocabulary:
+ name: vocabulary
+ description: The available items in the controlled vocabulary.
+ array:
+ dimensions:
+ - alias: 'null'
+ range: text
+ required: true
+ multivalued: false
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+    dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml
new file mode 100644
index 0000000..e29bfb9
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.2.1
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml
new file mode 100644
index 0000000..1cfb2bc
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.3.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+ description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..50aeafe
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml
new file mode 100644
index 0000000..3bbb768
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml
@@ -0,0 +1,176 @@
+name: hdmf-common.resources
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.resources
+version: 1.3.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.resources/
+classes:
+ ExternalResources:
+ name: ExternalResources
+ description: 'A set of four tables for tracking external resource references in
+ a file. NOTE: this data type is in beta testing and is subject to change in
+ a later version.'
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ keys:
+ name: keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ range: ExternalResources__keys
+ required: true
+ multivalued: false
+ resources:
+ name: resources
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ range: ExternalResources__resources
+ required: true
+ multivalued: false
+ objects:
+ name: objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ range: ExternalResources__objects
+ required: true
+ multivalued: false
+ object_keys:
+ name: object_keys
+ description: A table for identifying which objects use which keys.
+ range: ExternalResources__object_keys
+ required: true
+ multivalued: false
+ tree_root: true
+ ExternalResources__keys:
+ name: ExternalResources__keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(keys)
+ range: string
+ required: true
+ equals_string: keys
+ key_name:
+ name: key_name
+ description: The user term that maps to one or more resources in the 'resources'
+ table.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__resources:
+ name: ExternalResources__resources
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(resources)
+ range: string
+ required: true
+ equals_string: resources
+ keytable_idx:
+ name: keytable_idx
+ description: The index to the key in the 'keys' table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ resource_name:
+ name: resource_name
+ description: The name of the online resource (e.g., website, database) that
+ has the entity.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ resource_id:
+ name: resource_id
+ description: The unique identifier for the resource entity at the resource.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ uri:
+ name: uri
+ description: The URI for the resource entity this reference applies to. This
+ can be an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__objects:
+ name: ExternalResources__objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(objects)
+ range: string
+ required: true
+ equals_string: objects
+ object_id:
+ name: object_id
+ description: The UUID for the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ field:
+ name: field
+ description: The field of the object. This can be an empty string if the object
+ is a dataset and the field is the dataset values.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__object_keys:
+ name: ExternalResources__object_keys
+ description: A table for identifying which objects use which keys.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(object_keys)
+ range: string
+ required: true
+ equals_string: object_keys
+ objecttable_idx:
+ name: objecttable_idx
+ description: The index to the 'objects' table for the object that holds the
+ key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ keytable_idx:
+ name: keytable_idx
+ description: The index to the 'keys' table for the key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
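
The four ExternalResources tables defined above link up through their index columns: `object_keys` joins rows of `objects` to rows of `keys`, and `resources` maps keys to external entities via `keytable_idx`. A toy walk across the tables with made-up data:

```python
# Illustrative join across the four ExternalResources tables (toy data).
keys = ["mouse"]                             # keys.key_name
resources = [(0, "NCBI Taxonomy", "10090")]  # (keytable_idx, resource_name, resource_id)
objects = [("uuid-1234", "species")]         # (object_id, field)
object_keys = [(0, 0)]                       # (objecttable_idx, keytable_idx)

for obj_idx, key_idx in object_keys:
    for keytable_idx, resource_name, resource_id in resources:
        if keytable_idx == key_idx:
            print(objects[obj_idx][0], "->", keys[key_idx],
                  "->", f"{resource_name}:{resource_id}")
```
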
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..55db34f
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml
@@ -0,0 +1,68 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.3.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: A compressed sparse row matrix. Data are stored in the standard CSR
+ format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
+ and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: The shape (number of rows, number of columns) of this sparse
+ matrix.
+ range: uint
+ required: true
+ multivalued: true
+ indices:
+ name: indices
+ description: The column indices.
+ array:
+ dimensions:
+ - alias: number_of_non_zero_values
+ range: uint
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: The row index pointer.
+ array:
+ dimensions:
+ - alias: number_of_rows_in_the_matrix_1
+ range: uint
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: The non-zero values in the matrix.
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: The non-zero values in the matrix.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
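
The CSRMatrix description above spells out the standard CSR layout: column indices for row i live in indices[indptr[i]:indptr[i+1]] and the corresponding values in data[indptr[i]:indptr[i+1]]. Checking that by hand with a small matrix:

```python
# A 3-row CSR matrix with entries (0,0)=10, (0,2)=20, (1,1)=30; row 2 empty.
indptr = [0, 2, 3, 3]
indices = [0, 2, 1]
data = [10, 20, 30]

assert indices[indptr[0]:indptr[1]] == [0, 2]   # row 0 column indices
assert data[indptr[0]:indptr[1]] == [10, 20]    # row 0 values
assert indices[indptr[2]:indptr[3]] == []       # row 2 has no entries
```
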
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml
new file mode 100644
index 0000000..cae8e9e
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml
@@ -0,0 +1,193 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.3.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ VocabData:
+ name: VocabData
+ description: Data that come from a controlled vocabulary of text values. A data
+ value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ vocabulary:
+ name: vocabulary
+ description: The available items in the controlled vocabulary.
+ array:
+ dimensions:
+ - alias: 'null'
+ range: text
+ required: true
+ multivalued: false
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+    dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml
new file mode 100644
index 0000000..11885e7
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml
@@ -0,0 +1,18 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.3.0
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.resources
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml
new file mode 100644
index 0000000..6495eb4
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.4.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+ description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..e3d3df3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..13b5f58
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml
@@ -0,0 +1,68 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.4.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: A compressed sparse row matrix. Data are stored in the standard CSR
+ format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
+ and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: The shape (number of rows, number of columns) of this sparse
+ matrix.
+ range: uint
+ required: true
+ multivalued: true
+ indices:
+ name: indices
+ description: The column indices.
+ array:
+ dimensions:
+ - alias: number_of_non_zero_values
+ range: uint
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: The row index pointer.
+ array:
+ dimensions:
+ - alias: number_of_rows_in_the_matrix_1
+ range: uint
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: The non-zero values in the matrix.
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: The non-zero values in the matrix.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml
new file mode 100644
index 0000000..a88c85f
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml
@@ -0,0 +1,173 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.4.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+    dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml
new file mode 100644
index 0000000..50680da
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.4.0
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
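
These `namespace.yaml` files are thin entry points that pull in the per-module schemas via `imports`. A hedged sketch of inspecting one with linkml-runtime (the path is illustrative; adjust it to your checkout):

```python
from linkml_runtime import SchemaView

# SchemaView resolves the hdmf-common.* imports relative to this file,
# so classes from base, table, and sparse are all visible.
sv = SchemaView(
    "nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml"
)
print(sorted(sv.all_classes()))
```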
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
index e57c52a..1244aae 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml
@@ -36,8 +36,8 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
index 68f0304..e3d3df3 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
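
The deleted `repr` annotations above pinned each dtype to a concrete numpy scalar; after this change only the LinkML base type remains, with `minimum_value: 0` standing in for unsignedness. An illustrative reconstruction of what the old mapping expressed, not generator output:

```python
import numpy as np

# What `repr: np.int16` etc. used to record: one numpy scalar per dtype.
old_repr = {"float32": np.float32, "int16": np.int16, "uint8": np.uint8}
# Post-change, uint8 is just `integer` with minimum_value: 0 -- the 8-bit
# width is no longer encoded in the LinkML type itself.
assert old_repr["uint8"](255) == 255
```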
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml
index c5ec31d..24ea8fd 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml
@@ -29,6 +29,8 @@ classes:
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
+ required: true
+ multivalued: true
indices:
name: indices
description: The column indices.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
index cbaf95a..9ed7bc1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml
@@ -33,8 +33,9 @@ classes:
name: description
description: Description of what these vectors represent.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -73,6 +74,7 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
+ required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
@@ -108,10 +110,12 @@ classes:
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
+ required: true
description:
name: description
description: Description of what this table region points to.
range: text
+ required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -144,10 +148,13 @@ classes:
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
+ required: true
+ multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
+ required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
@@ -175,8 +182,8 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml
new file mode 100644
index 0000000..ca0d043
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.5.1
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+ description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..e3d3df3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..21654df
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml
@@ -0,0 +1,68 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.5.1
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: A compressed sparse row matrix. Data are stored in the standard CSR
+ format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
+ and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: The shape (number of rows, number of columns) of this sparse
+ matrix.
+ range: uint
+ required: true
+ multivalued: true
+ indices:
+ name: indices
+ description: The column indices.
+ array:
+ dimensions:
+ - alias: number_of_non_zero_values
+ range: uint
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: The row index pointer.
+ array:
+ dimensions:
+ - alias: number_of_rows_in_the_matrix_1
+ range: uint
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: The non-zero values in the matrix.
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: The non-zero values in the matrix.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
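
The `CSRMatrix` docstring above defines the standard compressed sparse row layout: column indices for row i live in `indices[indptr[i]:indptr[i+1]]`, with matching values in `data[indptr[i]:indptr[i+1]]`. A minimal numpy walk-through of that indexing (values are made up):

```python
import numpy as np

# 3x4 matrix with non-zeros at (0,1)=5, (1,0)=2, (1,3)=7, (2,2)=9
shape = (3, 4)
data = np.array([5, 2, 7, 9])
indices = np.array([1, 0, 3, 2])
indptr = np.array([0, 1, 3, 4])

for i in range(shape[0]):
    cols = indices[indptr[i]:indptr[i + 1]]
    vals = data[indptr[i]:indptr[i + 1]]
    print(f"row {i}: columns {cols.tolist()} -> values {vals.tolist()}")
```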
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml
new file mode 100644
index 0000000..3849f90
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml
@@ -0,0 +1,192 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.5.1
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+ dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
+ AlignedDynamicTable:
+ name: AlignedDynamicTable
+ description: DynamicTable container that supports storing a collection of sub-tables.
+ Each sub-table is a DynamicTable itself that is aligned with the main table
+ by row index. I.e., all DynamicTables stored in this group MUST have the same
+ number of rows. This type effectively defines a 2-level table in which the main
+ data is stored in the main table implemented by this type and additional columns
+ of the table are grouped into categories, with each category being represented
+ by a separate DynamicTable stored within the group.
+ is_a: DynamicTable
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
+ tree_root: true
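
The `VectorData`/`VectorIndex` descriptions above define the ragged-array convention: `VectorIndex[i]` is the end offset of row i in the flat `VectorData`, with row 0 starting at offset 0. A small sketch with invented values:

```python
spike_times = [0.1, 0.4, 0.9, 1.2, 1.5]   # VectorData: flat storage
spike_times_index = [2, 2, 5]             # VectorIndex: end offset per row

start = 0
for row, end in enumerate(spike_times_index):
    print(f"row {row}: {spike_times[start:end]}")
    start = end
# row 0: [0.1, 0.4] ; row 1: [] ; row 2: [0.9, 1.2, 1.5]
```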
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml
new file mode 100644
index 0000000..917870d
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.5.1
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml
new file mode 100644
index 0000000..293c18a
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.6.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+ description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..e3d3df3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..7ed736f
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml
@@ -0,0 +1,68 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.6.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: A compressed sparse row matrix. Data are stored in the standard CSR
+ format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
+ and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: The shape (number of rows, number of columns) of this sparse
+ matrix.
+ range: uint
+ required: true
+ multivalued: true
+ indices:
+ name: indices
+ description: The column indices.
+ array:
+ dimensions:
+ - alias: number_of_non_zero_values
+ range: uint
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: The row index pointer.
+ array:
+ dimensions:
+ - alias: number_of_rows_in_the_matrix_1
+ range: uint
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: The non-zero values in the matrix.
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: The non-zero values in the matrix.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml
new file mode 100644
index 0000000..ea22ad5
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml
@@ -0,0 +1,192 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.6.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+ dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
+ AlignedDynamicTable:
+ name: AlignedDynamicTable
+ description: DynamicTable container that supports storing a collection of sub-tables.
+ Each sub-table is a DynamicTable itself that is aligned with the main table
+ by row index. I.e., all DynamicTables stored in this group MUST have the same
+ number of rows. This type effectively defines a 2-level table in which the main
+ data is stored in the main table implemented by this type and additional columns
+ of the table are grouped into categories, with each category being represented
+ by a separate DynamicTable stored within the group.
+ is_a: DynamicTable
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml
new file mode 100644
index 0000000..241b849
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.6.0
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml
new file mode 100644
index 0000000..1b7dcb9
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml
@@ -0,0 +1,46 @@
+name: hdmf-common.base
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.base
+version: 1.7.0
+imports:
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.base/
+classes:
+ Data:
+ name: Data
+ description: An abstract data type for a dataset.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ Container:
+ name: Container
+ description: An abstract data type for a group storing collections of data and
+ metadata. Base type for all data and metadata containers.
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ tree_root: true
+ SimpleMultiContainer:
+ name: SimpleMultiContainer
+ description: A simple Container for holding onto multiple containers.
+ is_a: Container
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: Container
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml
new file mode 100644
index 0000000..e3d3df3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-common.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml
new file mode 100644
index 0000000..6167b42
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml
@@ -0,0 +1,68 @@
+name: hdmf-common.sparse
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.sparse
+version: 1.7.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.sparse/
+classes:
+ CSRMatrix:
+ name: CSRMatrix
+ description: A compressed sparse row matrix. Data are stored in the standard CSR
+ format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]]
+ and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ shape:
+ name: shape
+ description: The shape (number of rows, number of columns) of this sparse
+ matrix.
+ range: uint
+ required: true
+ multivalued: true
+ indices:
+ name: indices
+ description: The column indices.
+ array:
+ dimensions:
+ - alias: number_of_non_zero_values
+ range: uint
+ required: true
+ multivalued: false
+ indptr:
+ name: indptr
+ description: The row index pointer.
+ array:
+ dimensions:
+ - alias: number_of_rows_in_the_matrix_1
+ range: uint
+ required: true
+ multivalued: false
+ data:
+ name: data
+ description: The non-zero values in the matrix.
+ range: CSRMatrix__data
+ required: true
+ multivalued: false
+ tree_root: true
+ CSRMatrix__data:
+ name: CSRMatrix__data
+ description: The non-zero values in the matrix.
+ attributes:
+ name:
+ name: name
+ ifabsent: string(data)
+ range: string
+ required: true
+ equals_string: data
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml
new file mode 100644
index 0000000..8149ebe
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml
@@ -0,0 +1,192 @@
+name: hdmf-common.table
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-common
+id: hdmf-common.table
+version: 1.7.0
+imports:
+- hdmf-common.base
+- hdmf-common.nwb.language
+default_prefix: hdmf-common.table/
+classes:
+ VectorData:
+ name: VectorData
+ description: An n-dimensional dataset representing a column of a DynamicTable.
+ If used without an accompanying VectorIndex, first dimension is along the rows
+ of the DynamicTable and each step along the first dimension is a cell of the
+ larger table. VectorData can also be used to represent a ragged array if paired
+ with a VectorIndex. This allows for storing arrays of varying length in a single
+ cell of the DynamicTable by indexing into this VectorData. The first vector
+ is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+ and so on.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ description:
+ name: description
+ description: Description of what these vectors represent.
+ range: text
+ required: true
+ value:
+ name: value
+ range: AnyType
+ any_of:
+ - array:
+ dimensions:
+ - alias: dim0
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - array:
+ dimensions:
+ - alias: dim0
+ - alias: dim1
+ - alias: dim2
+ - alias: dim3
+ tree_root: true
+ VectorIndex:
+ name: VectorIndex
+ description: Used with VectorData to encode a ragged array. An array of indices
+ into the first dimension of the target VectorData, and forming a map between
+ the rows of a DynamicTable and the indices of the VectorData. The name of the
+ VectorIndex is expected to be the name of the target VectorData object followed
+ by "_index".
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ target:
+ name: target
+ description: Reference to the target dataset that this index applies to.
+ range: VectorData
+ required: true
+ tree_root: true
+ ElementIdentifiers:
+ name: ElementIdentifiers
+ description: A list of unique identifiers for values within a dataset, e.g. rows
+ of a DynamicTable.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(element_id)
+ range: string
+ required: true
+ tree_root: true
+ DynamicTableRegion:
+ name: DynamicTableRegion
+ description: DynamicTableRegion provides a link from one table to an index or
+ region of another. The `table` attribute is a link to another `DynamicTable`,
+ indicating which table is referenced, and the data is int(s) indicating the
+ row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+ associate rows with repeated meta-data without data duplication. They can also
+ be used to create hierarchical relationships between multiple `DynamicTable`s.
+ `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+ ragged references, so a single cell of a `DynamicTable` can reference many rows
+ of another `DynamicTable`.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ table:
+ name: table
+ description: Reference to the DynamicTable object that this region applies
+ to.
+ range: DynamicTable
+ required: true
+ description:
+ name: description
+ description: Description of what this table region points to.
+ range: text
+ required: true
+ tree_root: true
+ DynamicTable:
+ name: DynamicTable
+ description: A group containing multiple datasets that are aligned on the first
+ dimension (Currently, this requirement is left up to APIs to check and enforce).
+ These datasets represent different columns in the table. Apart from a column
+ that contains unique identifiers for each row, there are no other required datasets.
+ Users are free to add any number of custom VectorData objects (columns) here.
+ DynamicTable also supports ragged array columns, where each element can be of
+ a different size. To add a ragged array column, use a VectorIndex type to index
+ the corresponding VectorData type. See documentation for VectorData and VectorIndex
+ for more details. Unlike a compound data type, which is analogous to storing
+ an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+ This provides an alternative structure to choose from when optimizing storage
+ for anticipated access patterns. Additionally, this type provides a way of creating
+ a table without having to define a compound type up front. Although this convenience
+ may be attractive, users should think carefully about how data will be accessed.
+ DynamicTable is more appropriate for column-centric access, whereas a dataset
+ with a compound type would be more appropriate for row-centric access. Finally,
+ data size should also be taken into account. For small tables, performance loss
+ may be an acceptable trade-off for the flexibility of a DynamicTable.
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ colnames:
+ name: colnames
+ description: The names of the columns in this table. This should be used to
+ specify an order to the columns.
+ range: text
+ required: true
+ multivalued: true
+ description:
+ name: description
+ description: Description of what is in this dynamic table.
+ range: text
+ required: true
+ id:
+ name: id
+ description: Array of unique identifiers for the rows of this dynamic table.
+ array:
+ dimensions:
+ - alias: num_rows
+ range: int
+ required: true
+ multivalued: false
+ vector_data:
+ name: vector_data
+ description: Vector columns, including index columns, of this dynamic table.
+ range: VectorData
+ required: false
+ multivalued: true
+ tree_root: true
+ AlignedDynamicTable:
+ name: AlignedDynamicTable
+ description: DynamicTable container that supports storing a collection of sub-tables.
+ Each sub-table is a DynamicTable itself that is aligned with the main table
+ by row index. I.e., all DynamicTables stored in this group MUST have the same
+ number of rows. This type effectively defines a 2-level table in which the main
+ data is stored in the main table implemented by this type and additional columns
+ of the table are grouped into categories, with each category being represented
+ by a separate DynamicTable stored within the group.
+ is_a: DynamicTable
+ attributes:
+ value:
+ name: value
+ multivalued: true
+ inlined: true
+ inlined_as_list: false
+ any_of:
+ - range: DynamicTable
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml
new file mode 100644
index 0000000..b689554
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-common
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-common
+description: Common data structures provided by HDMF
+id: hdmf-common
+version: 1.7.0
+imports:
+- hdmf-common.base
+- hdmf-common.table
+- hdmf-common.sparse
+- hdmf-common.nwb.language
+default_prefix: hdmf-common/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
index 0365fab..b03629e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml
@@ -36,8 +36,8 @@ classes:
description: A simple Container for holding onto multiple containers.
is_a: Container
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
index 68f0304..e3d3df3 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml
index 74e9c04..842d1d6 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml
@@ -29,6 +29,8 @@ classes:
description: The shape (number of rows, number of columns) of this sparse
matrix.
range: uint
+ required: true
+ multivalued: true
indices:
name: indices
description: The column indices.
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
index ecd7b48..938ab2d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml
@@ -33,8 +33,9 @@ classes:
name: description
description: Description of what these vectors represent.
range: text
- array:
- name: array
+ required: true
+ value:
+ name: value
range: AnyType
any_of:
- array:
@@ -73,6 +74,7 @@ classes:
name: target
description: Reference to the target dataset that this index applies to.
range: VectorData
+ required: true
tree_root: true
ElementIdentifiers:
name: ElementIdentifiers
@@ -108,10 +110,12 @@ classes:
description: Reference to the DynamicTable object that this region applies
to.
range: DynamicTable
+ required: true
description:
name: description
description: Description of what this table region points to.
range: text
+ required: true
tree_root: true
DynamicTable:
name: DynamicTable
@@ -144,10 +148,13 @@ classes:
description: The names of the columns in this table. This should be used to
specify an order to the columns.
range: text
+ required: true
+ multivalued: true
description:
name: description
description: Description of what is in this dynamic table.
range: text
+ required: true
id:
name: id
description: Array of unique identifiers for the rows of this dynamic table.
@@ -175,8 +182,8 @@ classes:
by a separate DynamicTable stored within the group.
is_a: DynamicTable
attributes:
- children:
- name: children
+ value:
+ name: value
multivalued: true
inlined: true
inlined_as_list: false
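
The recurring `children` -> `value` rename in these hunks changes the slot name the generator emits for container classes. A hand-written illustration of the resulting shape (assumptions: pydantic v2, and a dict keyed by name per `inlined: true` with `inlined_as_list: false`; this is not actual generator output):

```python
from typing import Dict, Optional
from pydantic import BaseModel

class Container(BaseModel):
    name: str

class SimpleMultiContainer(Container):
    # Formerly `children`; inlined but not as a list, i.e. keyed by name.
    value: Optional[Dict[str, Container]] = None

smc = SimpleMultiContainer(name="holder", value={"a": Container(name="a")})
assert smc.value["a"].name == "a"
```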
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml
index c14e264..064f647 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml
@@ -9,7 +9,7 @@ annotations:
id: hdmf-experimental.experimental
version: 0.1.0
imports:
-- ../../hdmf_common/v1_5_0/namespace
+- ../../hdmf_common/v1_4_0/namespace
- hdmf-experimental.nwb.language
default_prefix: hdmf-experimental.experimental/
classes:
@@ -28,4 +28,5 @@ classes:
description: Reference to the VectorData object that contains the enumerable
elements
range: VectorData
+ required: true
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
index 5bb0e2b..0a824ca 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
index 17a7d9d..05dc855 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml
@@ -9,7 +9,7 @@ annotations:
id: hdmf-experimental.resources
version: 0.1.0
imports:
-- ../../hdmf_common/v1_5_0/namespace
+- ../../hdmf_common/v1_4_0/namespace
- hdmf-experimental.nwb.language
default_prefix: hdmf-experimental.resources/
classes:
@@ -73,6 +73,8 @@ classes:
name: key
description: The user term that maps to one or more resources in the 'resources'
table.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -90,18 +92,24 @@ classes:
keys_idx:
name: keys_idx
description: The index to the key in the 'keys' table.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
resources_idx:
name: resources_idx
description: The index into the 'resources' table
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
entity_id:
name: entity_id
description: The unique identifier of the entity.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -109,6 +117,8 @@ classes:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
an empty string.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -126,12 +136,16 @@ classes:
resource:
name: resource
description: The name of the resource.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
resource_uri:
name: resource_uri
description: The URI for the resource. This can be an empty string.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -150,6 +164,8 @@ classes:
object_id:
name: object_id
description: The UUID for the object.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -157,6 +173,8 @@ classes:
name: field
description: The field of the object. This can be an empty string if the object
is a dataset and the field is the dataset values.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -175,12 +193,16 @@ classes:
name: objects_idx
description: The index to the 'objects' table for the object that holds the
key.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
keys_idx:
name: keys_idx
description: The index to the 'keys' table for the key.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
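
The `ExternalResources` tables in this file link user keys to entities and resources through integer index columns (`keys_idx`, `resources_idx`). An illustrative resolution walk, with plain dicts standing in for the Data groups and made-up row contents:

```python
keys = ["mouse"]                                    # keys.key column
entities = [{"keys_idx": 0, "resources_idx": 0,
             "entity_id": "NCBI:txid10090",
             "entity_uri": "https://www.ncbi.nlm.nih.gov/taxonomy"}]
resources = [{"resource": "NCBI Taxonomy",
              "resource_uri": "https://www.ncbi.nlm.nih.gov/taxonomy"}]

# Follow the index columns from an entity row back to its key and resource
ent = entities[0]
print(keys[ent["keys_idx"]], "->", ent["entity_id"],
      "in", resources[ent["resources_idx"]]["resource"])
```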
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml
new file mode 100644
index 0000000..94b3194
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml
@@ -0,0 +1,32 @@
+name: hdmf-experimental.experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.experimental
+version: 0.2.0
+imports:
+- ../../hdmf_common/v1_5_1/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.experimental/
+classes:
+ EnumData:
+ name: EnumData
+ description: Data that come from a fixed set of values. A data value of i corresponds
+ to the i-th value in the VectorData referenced by the 'elements' attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ elements:
+ name: elements
+ description: Reference to the VectorData object that contains the enumerable
+ elements
+ range: VectorData
+ required: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml
new file mode 100644
index 0000000..0a824ca
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-experimental.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml
new file mode 100644
index 0000000..a1b6ec0
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml
@@ -0,0 +1,220 @@
+name: hdmf-experimental.resources
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.resources
+version: 0.2.0
+imports:
+- ../../hdmf_common/v1_5_1/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.resources/
+classes:
+ ExternalResources:
+ name: ExternalResources
+ description: 'A set of four tables for tracking external resource references in
+ a file. NOTE: this data type is in beta testing and is subject to change in
+ a later version.'
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ keys:
+ name: keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ range: ExternalResources__keys
+ required: true
+ multivalued: false
+ entities:
+ name: entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ range: ExternalResources__entities
+ required: true
+ multivalued: false
+ resources:
+ name: resources
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ range: ExternalResources__resources
+ required: true
+ multivalued: false
+ objects:
+ name: objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ range: ExternalResources__objects
+ required: true
+ multivalued: false
+ object_keys:
+ name: object_keys
+ description: A table for identifying which objects use which keys.
+ range: ExternalResources__object_keys
+ required: true
+ multivalued: false
+ tree_root: true
+ ExternalResources__keys:
+ name: ExternalResources__keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(keys)
+ range: string
+ required: true
+ equals_string: keys
+ key:
+ name: key
+ description: The user term that maps to one or more resources in the 'resources'
+ table.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__entities:
+ name: ExternalResources__entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(entities)
+ range: string
+ required: true
+ equals_string: entities
+ keys_idx:
+ name: keys_idx
+ description: The index to the key in the 'keys' table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ resources_idx:
+ name: resources_idx
+ description: The index into the 'resources' table.

+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ entity_id:
+ name: entity_id
+ description: The unique identifier of the entity.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ entity_uri:
+ name: entity_uri
+ description: The URI for the entity this reference applies to. This can be
+ an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__resources:
+ name: ExternalResources__resources
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(resources)
+ range: string
+ required: true
+ equals_string: resources
+ resource:
+ name: resource
+ description: The name of the resource.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ resource_uri:
+ name: resource_uri
+ description: The URI for the resource. This can be an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__objects:
+ name: ExternalResources__objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(objects)
+ range: string
+ required: true
+ equals_string: objects
+ object_id:
+ name: object_id
+ description: The UUID for the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ relative_path:
+ name: relative_path
+ description: The relative path from the container with the object_id to the
+ dataset or attribute with the value(s) that is associated with an external
+ resource. This can be an empty string if the container is a dataset which
+ contains the value(s) that is associated with an external resource.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ field:
+ name: field
+ description: The field of the compound data type using an external resource.
+ This is used only if the dataset or attribute is a compound data type; otherwise
+ this should be an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__object_keys:
+ name: ExternalResources__object_keys
+ description: A table for identifying which objects use which keys.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(object_keys)
+ range: string
+ required: true
+ equals_string: object_keys
+ objects_idx:
+ name: objects_idx
+ description: The index to the 'objects' table for the object that holds the
+ key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ keys_idx:
+ name: keys_idx
+ description: The index to the 'keys' table for the key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
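In this v0.2.0 layout, the side tables are joined purely by row index: `object_keys` pairs rows of `objects` with rows of `keys`, while `entities` points back into `keys` and `resources` via `keys_idx` and `resources_idx`. A sketch of resolving a key to its entity and resource (made-up rows; tuple fields ordered as in the schema above):

```python
keys = ["human"]                            # keys.key
resources = [("NCBI Taxonomy", "")]         # resources.(resource, resource_uri)
entities = [(0, 0, "NCBI_TAXON:9606", "")]  # entities.(keys_idx, resources_idx, entity_id, entity_uri)
object_keys = [(0, 0)]                      # object_keys.(objects_idx, keys_idx)

objects_idx, keys_idx = object_keys[0]
entity = next(e for e in entities if e[0] == keys_idx)
print(keys[keys_idx], "->", entity[2], "in", resources[entity[1]][0])
# human -> NCBI_TAXON:9606 in NCBI Taxonomy
```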
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml
new file mode 100644
index 0000000..6a311e0
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Experimental data structures provided by HDMF. These are not guaranteed
+ to be available in the future.
+id: hdmf-experimental
+version: 0.2.0
+imports:
+- hdmf-experimental.experimental
+- hdmf-experimental.resources
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml
new file mode 100644
index 0000000..4991b33
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml
@@ -0,0 +1,32 @@
+name: hdmf-experimental.experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.experimental
+version: 0.3.0
+imports:
+- ../../hdmf_common/v1_6_0/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.experimental/
+classes:
+ EnumData:
+ name: EnumData
+ description: Data that come from a fixed set of values. A data value of i corresponds
+ to the i-th value in the VectorData referenced by the 'elements' attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ elements:
+ name: elements
+ description: Reference to the VectorData object that contains the enumerable
+ elements
+ range: VectorData
+ required: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml
new file mode 100644
index 0000000..0a824ca
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-experimental.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml
new file mode 100644
index 0000000..ca25659
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml
@@ -0,0 +1,223 @@
+name: hdmf-experimental.resources
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.resources
+version: 0.3.0
+imports:
+- ../../hdmf_common/v1_6_0/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.resources/
+classes:
+ ExternalResources:
+ name: ExternalResources
+ description: 'A set of five tables for tracking external resource references in
+ a file. NOTE: this data type is experimental and is subject to change in a later
+ version.'
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ keys:
+ name: keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ range: ExternalResources__keys
+ required: true
+ multivalued: false
+ files:
+ name: files
+ description: A table for storing object ids of files used in external resources.
+ range: ExternalResources__files
+ required: true
+ multivalued: false
+ entities:
+ name: entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ range: ExternalResources__entities
+ required: true
+ multivalued: false
+ objects:
+ name: objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ range: ExternalResources__objects
+ required: true
+ multivalued: false
+ object_keys:
+ name: object_keys
+ description: A table for identifying which objects use which keys.
+ range: ExternalResources__object_keys
+ required: true
+ multivalued: false
+ tree_root: true
+ ExternalResources__keys:
+ name: ExternalResources__keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(keys)
+ range: string
+ required: true
+ equals_string: keys
+ key:
+ name: key
+ description: The user term that maps to one or more resources in the `resources`
+ table, e.g., "human".
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__files:
+ name: ExternalResources__files
+ description: A table for storing object ids of files used in external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(files)
+ range: string
+ required: true
+ equals_string: files
+ file_object_id:
+ name: file_object_id
+ description: The object id (UUID) of a file that contains objects that refer
+ to external resources.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__entities:
+ name: ExternalResources__entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(entities)
+ range: string
+ required: true
+ equals_string: entities
+ keys_idx:
+ name: keys_idx
+ description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ entity_id:
+ name: entity_id
+ description: The compact uniform resource identifier (CURIE) of the entity,
+ in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ entity_uri:
+ name: entity_uri
+ description: The URI for the entity this reference applies to. This can be
+ an empty string, e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__objects:
+ name: ExternalResources__objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(objects)
+ range: string
+ required: true
+ equals_string: objects
+ files_idx:
+ name: files_idx
+ description: The row index to the file in the `files` table containing the
+ object.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ object_id:
+ name: object_id
+ description: The object id (UUID) of the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ object_type:
+ name: object_type
+ description: The data type of the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ relative_path:
+ name: relative_path
+ description: The relative path from the data object with the `object_id` to
+ the dataset or attribute with the value(s) that is associated with an external
+ resource. This can be an empty string if the object is a dataset that contains
+ the value(s) that is associated with an external resource.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ field:
+ name: field
+ description: The field within the compound data type using an external resource.
+ This is used only if the dataset or attribute is a compound data type; otherwise
+ this should be an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__object_keys:
+ name: ExternalResources__object_keys
+ description: A table for identifying which objects use which keys.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(object_keys)
+ range: string
+ required: true
+ equals_string: object_keys
+ objects_idx:
+ name: objects_idx
+ description: The row index to the object in the `objects` table that holds
+ the key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ keys_idx:
+ name: keys_idx
+ description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
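Relative to v0.2.0, the v0.3.0 schema above drops the standalone `resources` table (entities now carry a CURIE directly in `entity_id`) and adds a `files` table that `objects` rows reference through `files_idx`. A sketch of the revised joins (made-up rows; field order as in the schema above):

```python
keys = ["human"]                                  # keys.key
entities = [(0, "NCBI_TAXON:9606", "")]           # entities.(keys_idx, entity_id, entity_uri)
files = ["00000000-0000-0000-0000-000000000000"]  # files.file_object_id
# objects.(files_idx, object_id, object_type, relative_path, field)
objects = [(0, "11111111-1111-1111-1111-111111111111", "DynamicTable", "", "")]

files_idx = objects[0][0]
print(files[files_idx])  # the file containing this object
print(entities[0][1])    # CURIE of the entity mapped to keys_idx == 0
```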
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml
new file mode 100644
index 0000000..fe62e64
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Experimental data structures provided by HDMF. These are not guaranteed
+ to be available in the future.
+id: hdmf-experimental
+version: 0.3.0
+imports:
+- hdmf-experimental.experimental
+- hdmf-experimental.resources
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml
new file mode 100644
index 0000000..6332939
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml
@@ -0,0 +1,32 @@
+name: hdmf-experimental.experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.experimental
+version: 0.4.0
+imports:
+- ../../hdmf_common/v1_7_0/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.experimental/
+classes:
+ EnumData:
+ name: EnumData
+ description: Data that come from a fixed set of values. A data value of i corresponds
+ to the i-th value in the VectorData referenced by the 'elements' attribute.
+ is_a: VectorData
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ elements:
+ name: elements
+ description: Reference to the VectorData object that contains the enumerable
+ elements
+ range: VectorData
+ required: true
+ tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml
new file mode 100644
index 0000000..0a824ca
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml
@@ -0,0 +1,94 @@
+name: hdmf-experimental.nwb.language
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: 'False'
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
+id: nwb.language
+imports:
+- linkml:types
+prefixes:
+ linkml:
+ prefix_prefix: linkml
+ prefix_reference: https://w3id.org/linkml
+default_prefix: nwb.language/
+types:
+ float32:
+ name: float32
+ typeof: float
+ float64:
+ name: float64
+ typeof: double
+ long:
+ name: long
+ typeof: integer
+ int64:
+ name: int64
+ typeof: integer
+ int:
+ name: int
+ typeof: integer
+ int32:
+ name: int32
+ typeof: integer
+ int16:
+ name: int16
+ typeof: integer
+ short:
+ name: short
+ typeof: integer
+ int8:
+ name: int8
+ typeof: integer
+ uint:
+ name: uint
+ typeof: integer
+ minimum_value: 0
+ uint32:
+ name: uint32
+ typeof: integer
+ minimum_value: 0
+ uint16:
+ name: uint16
+ typeof: integer
+ minimum_value: 0
+ uint8:
+ name: uint8
+ typeof: integer
+ minimum_value: 0
+ uint64:
+ name: uint64
+ typeof: integer
+ minimum_value: 0
+ numeric:
+ name: numeric
+ typeof: float
+ text:
+ name: text
+ typeof: string
+ utf:
+ name: utf
+ typeof: string
+ utf8:
+ name: utf8
+ typeof: string
+ utf_8:
+ name: utf_8
+ typeof: string
+ ascii:
+ name: ascii
+ typeof: string
+ bool:
+ name: bool
+ typeof: boolean
+ isodatetime:
+ name: isodatetime
+ typeof: datetime
+classes:
+ AnyType:
+ name: AnyType
+ description: Needed because some classes in hdmf-common are datasets without dtype
+ class_uri: linkml:Any
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml
new file mode 100644
index 0000000..e2acf65
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml
@@ -0,0 +1,248 @@
+name: hdmf-experimental.resources
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: false
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+id: hdmf-experimental.resources
+version: 0.4.0
+imports:
+- ../../hdmf_common/v1_7_0/namespace
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental.resources/
+classes:
+ ExternalResources:
+ name: ExternalResources
+ description: 'A set of five tables for tracking external resource references in
+ a file. NOTE: this data type is experimental and is subject to change in a later
+ version.'
+ is_a: Container
+ attributes:
+ name:
+ name: name
+ range: string
+ required: true
+ keys:
+ name: keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ range: ExternalResources__keys
+ required: true
+ multivalued: false
+ files:
+ name: files
+ description: A table for storing object ids of files used in external resources.
+ range: ExternalResources__files
+ required: true
+ multivalued: false
+ entities:
+ name: entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ range: ExternalResources__entities
+ required: true
+ multivalued: false
+ objects:
+ name: objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ range: ExternalResources__objects
+ required: true
+ multivalued: false
+ object_keys:
+ name: object_keys
+ description: A table for identifying which objects use which keys.
+ range: ExternalResources__object_keys
+ required: true
+ multivalued: false
+ entity_keys:
+ name: entity_keys
+ description: A table for identifying which keys use which entity.
+ range: ExternalResources__entity_keys
+ required: true
+ multivalued: false
+ tree_root: true
+ ExternalResources__keys:
+ name: ExternalResources__keys
+ description: A table for storing user terms that are used to refer to external
+ resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(keys)
+ range: string
+ required: true
+ equals_string: keys
+ key:
+ name: key
+ description: The user term that maps to one or more resources in the `resources`
+ table, e.g., "human".
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__files:
+ name: ExternalResources__files
+ description: A table for storing object ids of files used in external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(files)
+ range: string
+ required: true
+ equals_string: files
+ file_object_id:
+ name: file_object_id
+ description: The object id (UUID) of a file that contains objects that refer
+ to external resources.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__entities:
+ name: ExternalResources__entities
+ description: A table for mapping user terms (i.e., keys) to resource entities.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(entities)
+ range: string
+ required: true
+ equals_string: entities
+ entity_id:
+ name: entity_id
+ description: The compact uniform resource identifier (CURIE) of the entity,
+ in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ entity_uri:
+ name: entity_uri
+ description: The URI for the entity this reference applies to. This can be
+ an empty string, e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__objects:
+ name: ExternalResources__objects
+ description: A table for identifying which objects in a file contain references
+ to external resources.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(objects)
+ range: string
+ required: true
+ equals_string: objects
+ files_idx:
+ name: files_idx
+ description: The row index to the file in the `files` table containing the
+ object.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ object_id:
+ name: object_id
+ description: The object id (UUID) of the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ object_type:
+ name: object_type
+ description: The data type of the object.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ relative_path:
+ name: relative_path
+ description: The relative path from the data object with the `object_id` to
+ the dataset or attribute with the value(s) that is associated with an external
+ resource. This can be an empty string if the object is a dataset that contains
+ the value(s) that is associated with an external resource.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ field:
+ name: field
+ description: The field within the compound data type using an external resource.
+ This is used only if the dataset or attribute is a compound data type; otherwise
+ this should be an empty string.
+ array:
+ exact_number_dimensions: 1
+ range: text
+ required: true
+ multivalued: false
+ ExternalResources__object_keys:
+ name: ExternalResources__object_keys
+ description: A table for identifying which objects use which keys.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(object_keys)
+ range: string
+ required: true
+ equals_string: object_keys
+ objects_idx:
+ name: objects_idx
+ description: The row index to the object in the `objects` table that holds
+ the key.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ keys_idx:
+ name: keys_idx
+ description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ ExternalResources__entity_keys:
+ name: ExternalResources__entity_keys
+ description: A table for identifying which keys use which entity.
+ is_a: Data
+ attributes:
+ name:
+ name: name
+ ifabsent: string(entity_keys)
+ range: string
+ required: true
+ equals_string: entity_keys
+ entities_idx:
+ name: entities_idx
+ description: The row index to the entity in the `entities` table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
+ keys_idx:
+ name: keys_idx
+ description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
+ range: uint
+ required: true
+ multivalued: false
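The table new in v0.4.0 is `entity_keys`, which moves the entity-key association out of `entities` (note it no longer has a `keys_idx` column) into a dedicated join table, making the relationship many-to-many. A sketch with made-up rows:

```python
keys = ["human", "Homo sapiens"]      # keys.key
entities = [("NCBI_TAXON:9606", "")]  # entities.(entity_id, entity_uri)
entity_keys = [(0, 0), (0, 1)]        # entity_keys.(entities_idx, keys_idx)

# two user terms resolving to the same entity
for entities_idx, keys_idx in entity_keys:
    print(keys[keys_idx], "->", entities[entities_idx][0])
```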
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml
new file mode 100644
index 0000000..a48814e
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml
@@ -0,0 +1,17 @@
+name: hdmf-experimental
+annotations:
+ is_namespace:
+ tag: is_namespace
+ value: true
+ namespace:
+ tag: namespace
+ value: hdmf-experimental
+description: Experimental data structures provided by HDMF. These are not guaranteed
+ to be available in the future.
+id: hdmf-experimental
+version: 0.4.0
+imports:
+- hdmf-experimental.experimental
+- hdmf-experimental.resources
+- hdmf-experimental.nwb.language
+default_prefix: hdmf-experimental/
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml
index 7bc2244..c6cf1d4 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml
@@ -28,4 +28,5 @@ classes:
description: Reference to the VectorData object that contains the enumerable
elements
range: VectorData
+ required: true
tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
index 5bb0e2b..0a824ca 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml
@@ -5,7 +5,7 @@ annotations:
value: 'False'
namespace:
tag: namespace
- value: core
+ value: hdmf-experimental
description: Adapter objects to mimic the behavior of elements in the nwb-schema-language
id: nwb.language
imports:
@@ -19,67 +19,53 @@ types:
float32:
name: float32
typeof: float
- repr: np.float32
float64:
name: float64
typeof: double
- repr: np.float64
long:
name: long
typeof: integer
- repr: np.longlong
int64:
name: int64
typeof: integer
- repr: np.int64
int:
name: int
typeof: integer
int32:
name: int32
typeof: integer
- repr: np.int32
int16:
name: int16
typeof: integer
- repr: np.int16
short:
name: short
typeof: integer
- repr: np.int16
int8:
name: int8
typeof: integer
- repr: np.int8
uint:
name: uint
typeof: integer
- repr: np.uint64
minimum_value: 0
uint32:
name: uint32
typeof: integer
- repr: np.uint32
minimum_value: 0
uint16:
name: uint16
typeof: integer
- repr: np.uint16
minimum_value: 0
uint8:
name: uint8
typeof: integer
- repr: np.uint8
minimum_value: 0
uint64:
name: uint64
typeof: integer
- repr: np.uint64
minimum_value: 0
numeric:
name: numeric
typeof: float
- repr: np.number
text:
name: text
typeof: string
@@ -101,7 +87,6 @@ types:
isodatetime:
name: isodatetime
typeof: datetime
- repr: np.datetime64
classes:
AnyType:
name: AnyType
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
index d4514ed..7478fe1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml
@@ -78,6 +78,8 @@ classes:
name: key
description: The user term that maps to one or more resources in the `resources`
table, e.g., "human".
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -96,6 +98,8 @@ classes:
name: file_object_id
description: The object id (UUID) of a file that contains objects that refers
to external resources.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -114,6 +118,8 @@ classes:
name: entity_id
description: The compact uniform resource identifier (CURIE) of the entity,
in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -121,6 +127,8 @@ classes:
name: entity_uri
description: The URI for the entity this reference applies to. This can be
an empty string, e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -140,18 +148,24 @@ classes:
name: files_idx
description: The row index to the file in the `files` table containing the
object.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
object_id:
name: object_id
description: The object id (UUID) of the object.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
object_type:
name: object_type
description: The data type of the object.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -161,6 +175,8 @@ classes:
the dataset or attribute with the value(s) that is associated with an external
resource. This can be an empty string if the object is a dataset that contains
the value(s) that is associated with an external resource.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -169,6 +185,8 @@ classes:
description: The field within the compound data type using an external resource.
This is used only if the dataset or attribute is a compound data type; otherwise
this should be an empty string.
+ array:
+ exact_number_dimensions: 1
range: text
required: true
multivalued: false
@@ -187,12 +205,16 @@ classes:
name: objects_idx
description: The row index to the object in the `objects` table that holds
the key.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
@@ -210,12 +232,16 @@ classes:
entities_idx:
name: entities_idx
description: The row index to the entity in the `entities` table.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
keys_idx:
name: keys_idx
description: The row index to the key in the `keys` table.
+ array:
+ exact_number_dimensions: 1
range: uint
required: true
multivalued: false
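These v0.5.0 hunks retrofit every value and index column with `array: exact_number_dimensions: 1`, constraining each column to a one-dimensional vector. In the generated pydantic models this kind of constraint surfaces as a 1-D `NDArray` annotation, roughly like this sketch (assumed, simplified class; the test models further below use the same annotation style):

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class ObjectKeysLike(BaseModel):
    keys_idx: NDArray[Shape["*"], int]  # any length, but exactly one dimension

ObjectKeysLike(keys_idx=np.arange(5))                  # ok
# ObjectKeysLike(keys_idx=np.arange(4).reshape(2, 2))  # 2-D: fails validation
```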
diff --git a/nwb_linkml/src/nwb_linkml/types/df.py b/nwb_linkml/src/nwb_linkml/types/df.py
index 2fcea88..19d36a5 100644
--- a/nwb_linkml/src/nwb_linkml/types/df.py
+++ b/nwb_linkml/src/nwb_linkml/types/df.py
@@ -14,180 +14,163 @@ Pydantic models that behave like pandas dataframes
left in this module since it is necessary for it to make sense.
"""
-import ast
-from typing import Any, Dict, Optional, Type
+#
+# class DataFrame(BaseModel, pd.DataFrame):
+# """
+# Pydantic model root class that mimics a pandas dataframe.
+#
+# Notes:
+#
+# The synchronization between the underlying lists in the pydantic model
+# and the derived dataframe is partial, and at the moment unidirectional.
+# This class is primarily intended for reading from tables stored in
+# NWB files rather than being able to manipulate them.
+#
+# The dataframe IS updated when new values are *assigned* to a field.
+#
+# eg.::
+#
+# MyModel.fieldval = [1,2,3]
+#
+# But the dataframe is NOT updated when existing values are updated.
+#
+# eg.::
+#
+# MyModel.fieldval.append(4)
+#
+# In that case you need to call :meth:`.update_df` manually.
+#
+# Additionally, if the dataframe is modified, the underlying lists are NOT updated,
+# but when the model is dumped to a dictionary or serialized, the dataframe IS used,
+# so changes will be reflected then.
+#
+# Fields that shadow pandas methods WILL prevent them from being usable, except
+# by directly accessing the dataframe like ``mymodel._df``
+#
+# """
+#
+# _df: pd.DataFrame = None
+# model_config = ConfigDict(validate_assignment=True)
+#
+# def __init__(self, **kwargs):
+# # pdb.set_trace()
+# super().__init__(**kwargs)
+#
+# self._df = self.__make_df()
+#
+# def __make_df(self) -> pd.DataFrame:
+# # make dict that can handle ragged arrays and NoneTypes
+# items = {k: v for k, v in self.__dict__.items() if k in self.model_fields}
+#
+# df_dict = {
+# k: (pd.Series(v) if isinstance(v, list) else pd.Series([v])) for k, v in items.items()
+# }
+# df = pd.DataFrame(df_dict)
+# # replace Nans with None
+# df = df.fillna(np.nan).replace([np.nan], [None])
+# return df
+#
+# def update_df(self) -> None:
+# """
+# Update the internal dataframe in the case that the model values are changed
+# in a way that we can't detect, like appending to one of the lists.
+#
+# """
+# self._df = self.__make_df()
+#
+# def __getattr__(self, item: str):
+# """
+# Mimic pandas dataframe and pydantic model behavior
+# """
+# if item in ("df", "_df"):
+# return self.__pydantic_private__["_df"]
+# elif item in self.model_fields:
+# return self._df[item]
+# else:
+# try:
+# return object.__getattribute__(self._df, item)
+# except AttributeError:
+# return object.__getattribute__(self, item)
+#
+# @model_validator(mode="after")
+# def recreate_df(self) -> None:
+# """
+# Remake DF when validating (eg. when updating values on assignment)
+# """
+# self.update_df()
+#
+# @model_serializer(mode="wrap", when_used="always")
+# def serialize_model(self, nxt: SerializerFunctionWrapHandler) -> Dict[str, Any]:
+# """
+# We don't handle values that are changed on the dataframe by directly
+# updating the underlying model lists, but we implicitly handle them
+# by using the dataframe as the source when serializing
+# """
+# if self._df is None:
+# return nxt(self)
+# else:
+# out = self._df.to_dict("list")
+# # remove Nones
+# out = {k: [inner_v for inner_v in v if inner_v is not None] for k, v in out.items()}
+# return nxt(self.__class__(**out))
-import h5py
-import numpy as np
-import pandas as pd
-from pydantic import (
- BaseModel,
- ConfigDict,
- SerializerFunctionWrapHandler,
- model_serializer,
- model_validator,
-)
-
-from nwb_linkml.maps.hdmf import dereference_reference_vector, model_from_dynamictable
-from nwb_linkml.types.hdf5 import HDF5_Path
-
-
-class DataFrame(BaseModel, pd.DataFrame):
- """
- Pydantic model root class that mimics a pandas dataframe.
-
- Notes:
-
- The synchronization between the underlying lists in the pydantic model
- and the derived dataframe is partial, and at the moment unidirectional.
- This class is primarily intended for reading from tables stored in
- NWB files rather than being able to manipulate them.
-
- The dataframe IS updated when new values are *assigned* to a field.
-
- eg.::
-
- MyModel.fieldval = [1,2,3]
-
- But the dataframe is NOT updated when existing values are updated.
-
- eg.::
-
- MyModel.fieldval.append(4)
-
- In that case you need to call :meth:`.update_df` manually.
-
- Additionally, if the dataframe is modified, the underlying lists are NOT updated,
- but when the model is dumped to a dictionary or serialized, the dataframe IS used,
- so changes will be reflected then.
-
- Fields that shadow pandas methods WILL prevent them from being usable, except
- by directly accessing the dataframe like ``mymodel._df``
-
- """
-
- _df: pd.DataFrame = None
- model_config = ConfigDict(validate_assignment=True)
-
- def __init__(self, **kwargs):
- # pdb.set_trace()
- super().__init__(**kwargs)
-
- self._df = self.__make_df()
-
- def __make_df(self) -> pd.DataFrame:
- # make dict that can handle ragged arrays and NoneTypes
- items = {k: v for k, v in self.__dict__.items() if k in self.model_fields}
-
- df_dict = {
- k: (pd.Series(v) if isinstance(v, list) else pd.Series([v])) for k, v in items.items()
- }
- df = pd.DataFrame(df_dict)
- # replace Nans with None
- df = df.fillna(np.nan).replace([np.nan], [None])
- return df
-
- def update_df(self) -> None:
- """
- Update the internal dataframe in the case that the model values are changed
- in a way that we can't detect, like appending to one of the lists.
-
- """
- self._df = self.__make_df()
-
- def __getattr__(self, item: str):
- """
- Mimic pandas dataframe and pydantic model behavior
- """
- if item in ("df", "_df"):
- return self.__pydantic_private__["_df"]
- elif item in self.model_fields:
- return self._df[item]
- else:
- try:
- return object.__getattribute__(self._df, item)
- except AttributeError:
- return object.__getattribute__(self, item)
-
- @model_validator(mode="after")
- def recreate_df(self) -> None:
- """
- Remake DF when validating (eg. when updating values on assignment)
- """
- self.update_df()
-
- @model_serializer(mode="wrap", when_used="always")
- def serialize_model(self, nxt: SerializerFunctionWrapHandler) -> Dict[str, Any]:
- """
- We don't handle values that are changed on the dataframe by directly
- updating the underlying model lists, but we implicitly handle them
- by using the dataframe as the source when serializing
- """
- if self._df is None:
- return nxt(self)
- else:
- out = self._df.to_dict("list")
- # remove Nones
- out = {k: [inner_v for inner_v in v if inner_v is not None] for k, v in out.items()}
- return nxt(self.__class__(**out))
-
-
-def dynamictable_to_df(
- group: h5py.Group, model: Optional[Type[DataFrame]] = None, base: Optional[BaseModel] = None
-) -> DataFrame:
- """Generate a dataframe from an NDB DynamicTable"""
- if model is None:
- model = model_from_dynamictable(group, base)
-
- items = {}
- for col, _col_type in model.model_fields.items():
- if col not in group:
- continue
- idxname = col + "_index"
- if idxname in group:
- idx = group.get(idxname)[:]
- data = group.get(col)[idx - 1]
- else:
- data = group.get(col)[:]
-
- # Handle typing inside of list
- if isinstance(data[0], bytes):
- data = data.astype("unicode")
- if isinstance(data[0], str):
- # lists and other compound data types can get flattened out to strings when stored
- # so we try and literal eval and recover them
- try:
- eval_type = type(ast.literal_eval(data[0]))
- except (ValueError, SyntaxError):
- eval_type = str
-
- # if we've found one of those, get the data type within it.
- if eval_type is not str:
- eval_list = []
- for item in data.tolist():
- try:
- eval_list.append(ast.literal_eval(item))
- except ValueError:
- eval_list.append(None)
- data = eval_list
- elif isinstance(data[0], h5py.h5r.Reference):
- data = [HDF5_Path(group[d].name) for d in data]
- elif isinstance(data[0], tuple) and any(
- [isinstance(d, h5py.h5r.Reference) for d in data[0]]
- ):
- # references stored inside a tuple, reference + location.
- # dereference them!?
- dset = group.get(col)
- names = dset.dtype.names
- if names is not None and names[0] == "idx_start" and names[1] == "count":
- data = dereference_reference_vector(dset, data)
-
- else:
- data = data.tolist()
-
- # After list, check if we need to put this thing inside of
- # another class, as indicated by the enclosing model
-
- items[col] = data
-
- return model(hdf5_path=group.name, name=group.name.split("/")[-1], **items)
+#
+# def dynamictable_to_df(
+# group: h5py.Group, model: Optional[Type[DataFrame]] = None, base: Optional[BaseModel] = None
+# ) -> DataFrame:
+# """Generate a dataframe from an NDB DynamicTable"""
+# if model is None:
+# model = model_from_dynamictable(group, base)
+#
+# items = {}
+# for col, _col_type in model.model_fields.items():
+# if col not in group:
+# continue
+# idxname = col + "_index"
+# if idxname in group:
+# idx = group.get(idxname)[:]
+# data = group.get(col)[idx - 1]
+# else:
+# data = group.get(col)[:]
+#
+# # Handle typing inside of list
+# if isinstance(data[0], bytes):
+# data = data.astype("unicode")
+# if isinstance(data[0], str):
+# # lists and other compound data types can get flattened out to strings when stored
+# # so we try and literal eval and recover them
+# try:
+# eval_type = type(ast.literal_eval(data[0]))
+# except (ValueError, SyntaxError):
+# eval_type = str
+#
+# # if we've found one of those, get the data type within it.
+# if eval_type is not str:
+# eval_list = []
+# for item in data.tolist():
+# try:
+# eval_list.append(ast.literal_eval(item))
+# except ValueError:
+# eval_list.append(None)
+# data = eval_list
+# elif isinstance(data[0], h5py.h5r.Reference):
+# data = [HDF5_Path(group[d].name) for d in data]
+# elif isinstance(data[0], tuple) and any(
+# [isinstance(d, h5py.h5r.Reference) for d in data[0]]
+# ):
+# # references stored inside a tuple, reference + location.
+# # dereference them!?
+# dset = group.get(col)
+# names = dset.dtype.names
+# if names is not None and names[0] == "idx_start" and names[1] == "count":
+# data = dereference_reference_vector(dset, data)
+#
+# else:
+# data = data.tolist()
+#
+# # After list, check if we need to put this thing inside of
+# # another class, as indicated by the enclosing model
+#
+# items[col] = data
+#
+# return model(hdf5_path=group.name, name=group.name.split("/")[-1], **items)
diff --git a/nwb_linkml/tests/fixtures.py b/nwb_linkml/tests/fixtures.py
index e4b8fae..a38e3e0 100644
--- a/nwb_linkml/tests/fixtures.py
+++ b/nwb_linkml/tests/fixtures.py
@@ -1,6 +1,7 @@
import shutil
from dataclasses import dataclass, field
from pathlib import Path
+from types import ModuleType
from typing import Dict, Optional
import pytest
@@ -15,6 +16,8 @@ from linkml_runtime.linkml_model import (
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.io import schema as io
+from nwb_linkml.providers import LinkMLProvider, PydanticProvider
+from nwb_linkml.providers.linkml import LinkMLSchemaBuild
from nwb_schema_language import Attribute, Dataset, Group
__all__ = [
@@ -79,7 +82,6 @@ def tmp_output_dir_mod(tmp_output_dir) -> Path:
@pytest.fixture(scope="session", params=[{"core_version": "2.7.0", "hdmf_version": "1.8.0"}])
def nwb_core_fixture(request) -> NamespacesAdapter:
nwb_core = io.load_nwb_core(**request.param)
- nwb_core.populate_imports()
assert (
request.param["core_version"] in nwb_core.versions["core"]
) # 2.6.0 is actually 2.6.0-alpha
@@ -88,6 +90,24 @@ def nwb_core_fixture(request) -> NamespacesAdapter:
return nwb_core
+@pytest.fixture(scope="session")
+def nwb_core_linkml(nwb_core_fixture, tmp_output_dir) -> LinkMLSchemaBuild:
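+ """
+ Session-scoped LinkML build of the core namespace via LinkMLProvider.
+ """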
+ provider = LinkMLProvider(tmp_output_dir, allow_repo=False, verbose=False)
+ result = provider.build(ns_adapter=nwb_core_fixture, force=True)
+ return result["core"]
+
+
+@pytest.fixture(scope="session")
+def nwb_core_module(nwb_core_linkml: LinkMLSchemaBuild, tmp_output_dir) -> ModuleType:
+ """
+ Generated pydantic namespace from nwb core
+ """
+ provider = PydanticProvider(tmp_output_dir, verbose=False)
+ result = provider.build(nwb_core_linkml.namespace, force=True)
+ mod = provider.get("core", version=nwb_core_linkml.version, allow_repo=False)
+ return mod
+
+
@pytest.fixture(scope="session")
def data_dir() -> Path:
path = Path(__file__).parent.resolve() / "data"
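Taken together, the two new fixtures exercise the full provider chain: load the nwb-schema-language namespaces, build LinkML schema from them, then build and import the generated pydantic module. A condensed sketch of that flow outside pytest (output path illustrative; calls as in the fixtures above):

```python
from pathlib import Path

from nwb_linkml.io import schema as io
from nwb_linkml.providers import LinkMLProvider, PydanticProvider

out_dir = Path("/tmp/nwb_linkml_out")  # illustrative output directory

# nwb-schema-language namespaces (same versions as nwb_core_fixture)
nwb_core = io.load_nwb_core(core_version="2.7.0", hdmf_version="1.8.0")

# nwb-schema-language -> LinkML
linkml = LinkMLProvider(out_dir, allow_repo=False, verbose=False)
build = linkml.build(ns_adapter=nwb_core, force=True)["core"]

# LinkML -> pydantic models, retrieved as an importable module
pydantic = PydanticProvider(out_dir, verbose=False)
pydantic.build(build.namespace, force=True)
core = pydantic.get("core", version=build.version, allow_repo=False)
```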
diff --git a/nwb_linkml/tests/test_adapters/test_adapter.py b/nwb_linkml/tests/test_adapters/test_adapter.py
index e93ab5e..4514f5d 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter.py
@@ -42,6 +42,7 @@ def test_walk_fields(nwb_core_fixture):
dtype = list(nwb_core_fixture.walk_fields(nwb_core_fixture, "dtype"))
dtype_havers = list(nwb_core_fixture.walk_types(nwb_core_fixture, (Dataset, Attribute)))
+ dtype_havers = [haver for haver in dtype_havers if haver.dtype is not None]
compound_dtypes = [len(d.dtype) for d in dtype_havers if isinstance(d.dtype, list)]
expected_dtypes = np.sum(compound_dtypes) + len(dtype_havers)
assert expected_dtypes == len(dtype)
diff --git a/nwb_linkml/tests/test_adapters/test_adapter_classes.py b/nwb_linkml/tests/test_adapters/test_adapter_classes.py
index 464e55f..ee6e7f6 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter_classes.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter_classes.py
@@ -2,6 +2,7 @@ import pytest
from linkml_runtime.linkml_model import SlotDefinition
from nwb_linkml.adapters import DatasetAdapter, GroupAdapter
+from nwb_linkml.maps.dtype import handle_dtype
from nwb_schema_language import CompoundDtype, Dataset, Group, ReferenceDtype
@@ -89,7 +90,7 @@ def test_get_full_name():
parent.cls.neurodata_type_def = None
parent.cls.name = "ParentName"
parent.parent = grandparent
- assert adapter._get_full_name() == "Grandparent__ParentName__ChildName"
+ assert adapter._get_full_name() == "ParentName__ChildName"
# if it has none, raise value error
adapter.cls.name = None
@@ -179,9 +180,9 @@ def test_handle_dtype(nwb_schema):
CompoundDtype(name="reference", doc="reference!", dtype=reftype),
]
- assert cls.handle_dtype(reftype) == "TargetClass"
- assert cls.handle_dtype(None) == "AnyType"
- assert cls.handle_dtype([]) == "AnyType"
+ assert handle_dtype(reftype) == "TargetClass"
+ assert handle_dtype(None) == "AnyType"
+ assert handle_dtype([]) == "AnyType"
# handling compound types is currently TODO
- assert cls.handle_dtype(compoundtype) == "AnyType"
- assert cls.handle_dtype("int32") == "int32"
+ assert handle_dtype(compoundtype) == "AnyType"
+ assert handle_dtype("int32") == "int32"
diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
index 5124bdd..bbcb739 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
@@ -46,3 +46,17 @@ def test_skip_imports(nwb_core_fixture):
# we shouldn't have any of the hdmf-common schema in with us
namespaces = [sch.annotations["namespace"].value for sch in res.schemas]
assert all([ns == "core" for ns in namespaces])
+
+
+@pytest.mark.skip()
+def test_populate_inheritance(nwb_core_fixture):
+ """
+ Classes should receive and override the properties of their parents
+ when they have neurodata_type_inc
+ Args:
+ nwb_core_fixture:
+
+ Returns:
+
+ """
+ pass
diff --git a/nwb_linkml/tests/test_generate.py b/nwb_linkml/tests/test_generate.py
index 70b08bc..529cdd1 100644
--- a/nwb_linkml/tests/test_generate.py
+++ b/nwb_linkml/tests/test_generate.py
@@ -76,6 +76,7 @@ def test_generate_pydantic(tmp_output_dir):
initfile.write("# Autogenerated module indicator")
+@pytest.mark.linkml
@pytest.mark.provider
@pytest.mark.dev
def test_generate_linkml_provider(tmp_output_dir, nwb_core_fixture):
@@ -84,6 +85,7 @@ def test_generate_linkml_provider(tmp_output_dir, nwb_core_fixture):
result = provider.build(nwb_core_fixture)
+@pytest.mark.pydantic
@pytest.mark.provider
@pytest.mark.dev
def test_generate_pydantic_provider(tmp_output_dir):
diff --git a/nwb_linkml/tests/test_includes/conftest.py b/nwb_linkml/tests/test_includes/conftest.py
new file mode 100644
index 0000000..67011ec
--- /dev/null
+++ b/nwb_linkml/tests/test_includes/conftest.py
@@ -0,0 +1,174 @@
+from typing import Tuple
+
+import numpy as np
+import pytest
+
+from nwb_linkml.models import (
+ Device,
+ DynamicTableRegion,
+ ElectricalSeries,
+ ElectrodeGroup,
+ ExtracellularEphysElectrodes,
+ IntracellularElectrode,
+ IntracellularElectrodesTable,
+ IntracellularRecordingsTable,
+ IntracellularResponsesTable,
+ IntracellularStimuliTable,
+ TimeSeriesReferenceVectorData,
+ Units,
+ VoltageClampSeries,
+ VoltageClampSeriesData,
+ VoltageClampStimulusSeries,
+ VoltageClampStimulusSeriesData,
+)
+
+
+@pytest.fixture()
+def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrodes"]:
+ """
+ Demo electrical series with adjoining electrodes
+ """
+ n_electrodes = 5
+ n_times = 100
+ data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes).astype(float)
+ timestamps = np.linspace(0, 1, n_times)
+
+ device = Device(name="my electrode")
+
+ # electrode group is the physical description of the electrodes
+ electrode_group = ElectrodeGroup(
+ name="GroupA",
+ device=device,
+ description="an electrode group",
+ location="you know where it is",
+ )
+
+ # make electrodes tables
+ electrodes = ExtracellularEphysElectrodes(
+ description="idk these are also electrodes",
+ id=np.arange(0, n_electrodes),
+ x=np.arange(0, n_electrodes).astype(float),
+ y=np.arange(n_electrodes, n_electrodes * 2).astype(float),
+ group=[electrode_group] * n_electrodes,
+ group_name=[electrode_group.name] * n_electrodes,
+ location=[str(i) for i in range(n_electrodes)],
+ extra_column=["sup"] * n_electrodes,
+ )
+
+ electrical_series = ElectricalSeries(
+ name="my recording!",
+ electrodes=DynamicTableRegion(
+ table=electrodes,
+ value=np.arange(n_electrodes - 1, -1, step=-1),
+ name="electrodes",
+ description="hey",
+ ),
+ timestamps=timestamps,
+ data=data,
+ )
+ return electrical_series, electrodes
+
+
+def _ragged_array(n_units: int) -> tuple[list[np.ndarray], np.ndarray]:
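+ """
+ Build ``n_units`` ragged spike-time arrays plus the cumulative end index of
+ each array's cell in the flattened data (the VectorData/VectorIndex encoding).
+ """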
+ generator = np.random.default_rng()
+ spike_times = [
+ np.full(shape=generator.integers(10, 50), fill_value=i, dtype=float) for i in range(n_units)
+ ]
+ spike_idx = []
+ for i in range(n_units):
+ if i == 0:
+ spike_idx.append(len(spike_times[0]))
+ else:
+ spike_idx.append(len(spike_times[i]) + spike_idx[i - 1])
+ spike_idx = np.array(spike_idx)
+ return spike_times, spike_idx
+
+
+@pytest.fixture(params=[True, False])
+def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]:
+ """
+ Test case for units
+
+ Parameterized by extra_column because pandas likes to pivot dataframes
+ to long when there is only one column and it's not len() == 1
+ """
+ spike_times, spike_idx = _ragged_array(24)
+
+ spike_times_flat = np.concatenate(spike_times)
+
+ kwargs = {
+ "description": "units!!!!",
+ "spike_times": spike_times_flat,
+ "spike_times_index": spike_idx,
+ }
+ if request.param:
+ kwargs["extra_column"] = ["hey!"] * 24
+ units = Units(**kwargs)
+ return units, spike_times, spike_idx
+
+
+def _icephys_stimulus_and_response(
+ i: int, electrode: IntracellularElectrode
+) -> tuple[VoltageClampStimulusSeries, VoltageClampSeries]:
+ generator = np.random.default_rng()
+ n_samples = generator.integers(20, 50)
+ stimulus = VoltageClampStimulusSeries(
+ name=f"vcss_{i}",
+ data=VoltageClampStimulusSeriesData(value=[i] * n_samples),
+ stimulus_description=f"{i}",
+ sweep_number=i,
+ electrode=electrode,
+ )
+ response = VoltageClampSeries(
+ name=f"vcs_{i}",
+ data=VoltageClampSeriesData(value=[i] * n_samples),
+ stimulus_description=f"{i}",
+ electrode=electrode,
+ )
+ return stimulus, response
+
+
+@pytest.fixture()
+def intracellular_recordings_table() -> IntracellularRecordingsTable:
+ n_recordings = 10
+ generator = np.random.default_rng()
+ device = Device(name="my device")
+ electrode = IntracellularElectrode(
+ name="my_electrode", description="an electrode", device=device
+ )
+ stims = []
+ responses = []
+ for i in range(n_recordings):
+ stim, response = _icephys_stimulus_and_response(i, electrode)
+ stims.append(stim)
+ responses.append(response)
+
+ electrodes = IntracellularElectrodesTable(
+ name="intracellular_electrodes", electrode=[electrode] * n_recordings
+ )
+ stimuli = IntracellularStimuliTable(
+ name="intracellular_stimuli",
+ stimulus=TimeSeriesReferenceVectorData(
+ name="stimulus",
+ description="this should be optional",
+ idx_start=np.arange(n_recordings),
+ count=generator.integers(1, 10, (n_recordings,)),
+ timeseries=stims,
+ ),
+ )
+
+ responses = IntracellularResponsesTable(
+ name="intracellular_responses",
+ response=TimeSeriesReferenceVectorData(
+ name="response",
+ description="this should be optional",
+ idx_start=np.arange(n_recordings),
+ count=generator.integers(1, 10, (n_recordings,)),
+ timeseries=responses,
+ ),
+ )
+
+ recordings_table = IntracellularRecordingsTable(
+ electrodes=electrodes, stimuli=stimuli, responses=responses
+ )
+ return recordings_table
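The `units` fixture above flattens the ragged arrays from `_ragged_array` into `spike_times` plus `spike_times_index`, where the index holds the cumulative *end* position of each unit's cell. A sketch of undoing that encoding (standalone numpy, toy data):

```python
import numpy as np

flat = np.array([0.0, 0.0, 1.0, 1.0, 1.0, 2.0])  # spike_times
index = np.array([2, 5, 6])                      # spike_times_index: end of each cell

starts = np.concatenate([[0], index[:-1]])
cells = [flat[start:end] for start, end in zip(starts, index)]
assert [c.tolist() for c in cells] == [[0.0, 0.0], [1.0, 1.0, 1.0], [2.0]]
```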
diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index 0024917..cb2b974 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -1,27 +1,773 @@
-from typing import Tuple
+from typing import Optional, Type
import numpy as np
+import pandas as pd
import pytest
+from numpydantic import NDArray, Shape
+from pydantic import ValidationError
-from nwb_linkml.models.pydantic.core.v2_7_0.namespace import (
- ElectricalSeries,
- NWBFileGeneralExtracellularEphysElectrodes,
+from nwb_linkml.includes import hdmf
+from nwb_linkml.includes.hdmf import (
+ AlignedDynamicTableMixin,
+ DynamicTableMixin,
+ VectorDataMixin,
+ VectorIndexMixin,
)
+# FIXME: Make this just be the output of the provider by patching into import machinery
+from nwb_linkml.models.pydantic.core.v2_7_0.namespace import (
+ ElectrodeGroup,
+)
+
+from .conftest import _ragged_array
+
+# --------------------------------------------------
+# Unit tests on mixins directly (model tests below)
+# --------------------------------------------------
+
@pytest.fixture()
-def electrical_series() -> Tuple[ElectricalSeries, NWBFileGeneralExtracellularEphysElectrodes]:
- """
- Demo electrical series with adjoining electrodes
- """
- n_electrodes = 5
- n_times = 100
- data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes)
- timestamps = np.linspace(0, 1, n_times)
+def basic_table() -> tuple[Type[DynamicTableMixin], dict[str, NDArray[Shape["10"], int]]]:
+ class MyData(DynamicTableMixin):
+ col_1: hdmf.VectorData[NDArray[Shape["*"], int]]
+ col_2: hdmf.VectorData[NDArray[Shape["*"], int]]
+ col_3: hdmf.VectorData[NDArray[Shape["*"], int]]
- # make electrodes tables
- electrodes = NWBFileGeneralExtracellularEphysElectrodes(
- id=np.arange(0, n_electrodes),
- x=np.arange(0, n_electrodes),
- y=np.arange(n_electrodes, n_electrodes * 2),
+ cols = {
+ "col_1": np.arange(10),
+ "col_2": np.arange(10),
+ "col_3": np.arange(10),
+ "col_4": np.arange(10),
+ "col_5": np.arange(10),
+ }
+ return MyData, cols
+
+
+@pytest.fixture()
+def aligned_table() -> tuple[Type[AlignedDynamicTableMixin], dict[str, DynamicTableMixin]]:
+ class Table1(DynamicTableMixin):
+ col1: hdmf.VectorData[NDArray[Shape["*"], int]]
+ col2: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+ class Table2(DynamicTableMixin):
+ col3: hdmf.VectorData[NDArray[Shape["*"], int]]
+ col4: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+ class Table3(DynamicTableMixin):
+ col5: hdmf.VectorData[NDArray[Shape["*"], int]]
+ col6: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+ array = np.arange(10)
+
+ table1 = Table1(col1=array, col2=array)
+ table2 = Table2(col3=array, col4=array)
+ table3 = Table3(col5=array, col6=array)
+
+ class AlignedTable(AlignedDynamicTableMixin):
+ table1: Table1
+ table2: Table2
+
+ return AlignedTable, {"table1": table1, "table2": table2, "table3": table3}
+
+
+def test_dynamictable_mixin_indexing(basic_table):
+ """
+ Can index values from a dynamictable
+ """
+ MyData, cols = basic_table
+
+    colnames = list(cols)
+ inst = MyData(**cols)
+ assert len(inst) == 10
+
+ row = inst[0]
+ # successfully get a single row :)
+ assert row.shape == (1, 5)
+ assert row.columns.tolist() == colnames
+
+ # slice a range of rows
+ rows = inst[0:3]
+ assert rows.shape == (3, 5)
+
+ # get a single column
+ col = inst["col_1"]
+ assert all(col.value == np.arange(10))
+
+ # get a single cell
+ val = inst[5, "col_2"]
+ assert val == 5
+ val = inst[5, 1]
+ assert val == 5
+
+ # get a slice of rows and columns
+ val = inst[0:3, 0:3]
+ assert val.shape == (3, 3)
+ assert val.columns.tolist() == colnames[0:3]
+
+ # slice of rows with string colname
+ val = inst[0:2, "col_1"]
+ assert val.shape == (2, 1)
+ assert val.columns.tolist() == ["col_1"]
+
+ # array of rows
+ # crazy slow but we'll work on perf later
+ val = inst[np.arange(2), "col_1"]
+ assert val.shape == (2, 1)
+ assert val.columns.tolist() == ["col_1"]
+
+ # should raise an error on a 3d index
+ with pytest.raises(ValueError, match=".*2-dimensional.*"):
+ _ = inst[1, 1, 1]
+
+ # error on unhandled indexing type
+ with pytest.raises(ValueError, match="Unsure how to get item with key.*"):
+ _ = inst[5.5]
+
+
+def test_dynamictable_mixin_colnames():
+ """
+ Should correctly infer colnames
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: NDArray[Shape["* col"], int]
+
+ new_col_1 = VectorDataMixin(value=np.arange(10))
+ new_col_2 = VectorDataMixin(value=np.arange(10))
+
+ inst = MyDT(existing_col=np.arange(10), new_col_1=new_col_1, new_col_2=new_col_2)
+ assert inst.colnames == ["existing_col", "new_col_1", "new_col_2"]
+
+
+def test_dynamictable_mixin_colnames_index():
+ """
+ Exclude index columns in colnames
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: NDArray[Shape["* col"], int]
+
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(value=np.arange(10)),
+ }
+ # explicit index with mismatching name
+ cols["weirdname_index"] = VectorIndexMixin(value=np.arange(10), target=cols["new_col_1"])
+ # implicit index with matching name
+ cols["new_col_2_index"] = VectorIndexMixin(value=np.arange(10))
+
+ inst = MyDT(**cols)
+ assert inst.colnames == ["existing_col", "new_col_1", "new_col_2"]
+
+
+def test_dynamictable_mixin_colnames_ordered():
+ """
+ Should be able to pass explicit order to colnames
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: NDArray[Shape["* col"], int]
+
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(value=np.arange(10)),
+ "new_col_3": hdmf.VectorData(value=np.arange(10)),
+ }
+ order = ["new_col_2", "existing_col", "new_col_1", "new_col_3"]
+
+ inst = MyDT(**cols, colnames=order)
+ assert inst.colnames == order
+
+    # this should get reflected in the columns selector and the df it produces
+ assert all([key1 == key2 for key1, key2 in zip(order, inst._columns)])
+ assert all(inst[0].columns == order)
+
+ # partial lists should append unnamed columns at the end
+ partial_order = ["new_col_3", "new_col_2"]
+ inst = MyDT(**cols, colnames=partial_order)
+ assert inst.colnames == [*partial_order, "existing_col", "new_col_1"]
+
+
+def test_dynamictable_mixin_getattr():
+ """
+ Dynamictable should forward unknown getattr requests to the df
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
+
+ col = hdmf.VectorData(value=np.arange(10))
+ inst = MyDT(existing_col=col)
+
+ # regular lookup for attrs that exist
+ assert isinstance(inst.existing_col, hdmf.VectorData)
+ assert all(inst.existing_col.value == col.value)
+
+ # df lookup for those that don't
+ assert isinstance(inst.columns, pd.Index)
+
+ with pytest.raises(AttributeError):
+ _ = inst.really_fake_name_that_pandas_and_pydantic_definitely_dont_define
+
+
+def test_dynamictable_coercion():
+ """
+ Dynamictable should coerce arrays into vectordata objects for known and unknown cols
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
+ optional_col: Optional[hdmf.VectorData[NDArray[Shape["* col"], int]]]
+
+ cols = {
+ "existing_col": np.arange(10),
+ "optional_col": np.arange(10),
+ "new_col_1": np.arange(10),
+ }
+ inst = MyDT(**cols)
+ assert isinstance(inst.existing_col, hdmf.VectorData)
+ assert isinstance(inst.optional_col, hdmf.VectorData)
+ assert isinstance(inst.new_col_1, hdmf.VectorData)
+ assert all(inst.existing_col.value == np.arange(10))
+ assert all(inst.optional_col.value == np.arange(10))
+ assert all(inst.new_col_1.value == np.arange(10))
+
+
+def test_dynamictable_create_id():
+ class MyDT(DynamicTableMixin):
+ existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
+
+ cols = {
+ "existing_col": np.arange(10),
+ }
+ inst = MyDT(**cols)
+
+ assert all(inst.id == np.arange(10))
+
+
+def test_dynamictable_resolve_index():
+ """
+ Dynamictable should resolve and connect data to indices, explicit and implicit
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
+
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ "new_col_2": hdmf.VectorData(value=np.arange(10)),
+ }
+ # explicit index with mismatching name
+ cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"])
+ # implicit index with matching name
+ cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10))
+
+ inst = MyDT(**cols)
+ assert inst.weirdname_index.target is inst.new_col_1
+ assert inst.new_col_2_index.target is inst.new_col_2
+ assert inst.new_col_1._index is inst.weirdname_index
+ assert inst.new_col_2._index is inst.new_col_2_index
+
+
+def test_dynamictable_assert_equal_length():
+ """
+ Dynamictable validates that columns are of equal length
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: NDArray[Shape["* col"], int]
+
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(11)),
+ }
+ with pytest.raises(ValidationError, match="Columns are not of equal length"):
+ _ = MyDT(**cols)
+
+ cols = {
+ "existing_col": np.arange(11),
+ "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ }
+ with pytest.raises(ValidationError, match="Columns are not of equal length"):
+ _ = MyDT(**cols)
+
+ # wrong lengths are fine as long as the index is good
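+    # (index boundaries [10, 20, ..., 100] give 10 ragged rows, matching the
+    # 10-row existing_col)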
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(100)),
+ "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 10) + 10),
+ }
+ _ = MyDT(**cols)
+
+ # but not fine if the index is not good
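+    # (boundaries [5, 10, ..., 100] would give 20 ragged rows against a 10-row column)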
+ cols = {
+ "existing_col": np.arange(10),
+ "new_col_1": hdmf.VectorData(value=np.arange(100)),
+ "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5),
+ }
+ with pytest.raises(ValidationError, match="Columns are not of equal length"):
+ _ = MyDT(**cols)
+
+
+def test_dynamictable_setattr():
+ """
+ Setting a new column as an attribute adds it to colnames and reruns validations
+ """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]
+
+ cols = {
+ "existing_col": hdmf.VectorData(value=np.arange(10)),
+ "new_col_1": hdmf.VectorData(value=np.arange(10)),
+ }
+ inst = MyDT(existing_col=cols["existing_col"])
+ assert inst.colnames == ["existing_col"]
+
+ inst.new_col_1 = cols["new_col_1"]
+ assert inst.colnames == ["existing_col", "new_col_1"]
+ assert inst[:].columns.tolist() == ["existing_col", "new_col_1"]
+ # length unchanged because id should be the same
+ assert len(inst) == 10
+
+ # model validators should be called to ensure equal length
+ with pytest.raises(ValidationError):
+ inst.new_col_2 = hdmf.VectorData(value=np.arange(11))
+
+
+def test_vectordata_indexing():
+ """
+ Vectordata/VectorIndex pairs should know how to index off each other
+ """
+ n_rows = 50
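+    # _ragged_array gives (rows, index): row i is an array filled with the value i,
+    # and index holds the cumulative end offset of each row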
+ value_array, index_array = _ragged_array(n_rows)
+    value_array = np.concatenate(value_array)
+
+ data = hdmf.VectorData(value=value_array)
+
+ # before we have an index, things should work as normal, indexing a 1D array
+ assert data[0] == 0
+ # and setting values
+ data[0] = 1
+ assert data[0] == 1
+ data[0] = 0
+
+ # indexes by themselves are the same
+ index_notarget = hdmf.VectorIndex(value=index_array)
+ assert index_notarget[0] == index_array[0]
+ assert all(index_notarget[0:3] == index_array[0:3])
+ oldval = index_array[0]
+ index_notarget[0] = 5
+ assert index_notarget[0] == 5
+ index_notarget[0] = oldval
+
+ index = hdmf.VectorIndex(value=index_array, target=data)
+ data._index = index
+
+ # after an index, both objects should index raggedly
+ for i in range(len(index)):
+ assert all(data[i] == i)
+ assert all(index[i] == i)
+
+ for item in (data, index):
+ section = item[0:3]
+ for i, subitem in enumerate(section):
+ assert all(subitem == i)
+
+ # setting uses the same indexing logic
+ data[0] = 5
+ assert all(data[0] == 5)
+ data[0:3] = [5, 4, 3]
+ assert all(data[0] == 5)
+ assert all(data[1] == 4)
+ assert all(data[2] == 3)
+ data[0:3] = 6
+ assert all(data[0] == 6)
+ assert all(data[1] == 6)
+ assert all(data[2] == 6)
+ with pytest.raises(ValueError, match=".*equal-length.*"):
+ data[0:3] = [5, 4]
+
+
+def test_vectordata_getattr():
+ """
+ VectorData and VectorIndex both forward getattr to ``value``
+ """
+ data = hdmf.VectorData(value=np.arange(100))
+ index = hdmf.VectorIndex(value=np.arange(10, 101, 10), target=data)
+
+ # get attrs that we defined on the models
+ # i.e. no attribute errors here
+ _ = data.model_fields
+ _ = index.model_fields
+
+ # but for things that aren't defined, get the numpy method
+ # note that index should not try and get the sum from the target -
+ # that would be hella confusing. we only refer to the target when indexing.
+ assert data.sum() == np.sum(np.arange(100))
+ assert index.sum() == np.sum(np.arange(10, 101, 10))
+
+ # and also raise attribute errors when nothing is found
+ with pytest.raises(AttributeError):
+ _ = data.super_fake_attr_name
+ with pytest.raises(AttributeError):
+ _ = index.super_fake_attr_name
+
+
+def test_vectordata_generic_numpydantic_validation():
+ """
+ Using VectorData as a generic with a numpydantic array annotation should still validate
+
+ Simple test here because numpydantic validation is tested in numpydantic itself,
+    we just want to check that the annotations are actually applied as validation
+    rather than silently accepting anything.
+    """
+
+ class MyDT(DynamicTableMixin):
+ existing_col: NDArray[Shape["3 col"], int]
+
+ with pytest.raises(ValidationError):
+ _ = MyDT(existing_col=np.zeros((4, 5, 6), dtype=int))
+
+
+@pytest.mark.xfail
+def test_dynamictable_append_row():
+ raise NotImplementedError("Reminder to implement row appending")
+
+
+def test_dynamictable_region_indexing(basic_table):
+ """
+ Without an index, DynamicTableRegion should just be a single-row index into
+ another table
+ """
+ model, cols = basic_table
+ inst = model(**cols)
+
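+    # scrambled index so that region rows visibly differ from table row order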
+ index = np.array([9, 4, 8, 3, 7, 2, 6, 1, 5, 0])
+
+ table_region = hdmf.DynamicTableRegion(value=index, table=inst)
+
+ row = table_region[1]
+ assert all(row.iloc[0] == index[1])
+
+ # slices
+ rows = table_region[3:5]
+ assert all(rows[0].iloc[0] == index[3])
+ assert all(rows[1].iloc[0] == index[4])
+ assert len(rows) == 2
+ assert all([row.shape == (1, 5) for row in rows])
+
+ # out of order fine too
+ oorder = [2, 5, 4]
+ rows = table_region[oorder]
+ assert len(rows) == 3
+ assert all([row.shape == (1, 5) for row in rows])
+ for i, idx in enumerate(oorder):
+ assert all(rows[i].iloc[0] == index[idx])
+
+ # also works when used as a column in a table
+ class AnotherTable(DynamicTableMixin):
+ region: hdmf.DynamicTableRegion
+ another_col: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+ inst2 = AnotherTable(region=table_region, another_col=np.arange(10))
+ rows = inst2[0:3]
+ col = rows.region
+ for i in range(3):
+ assert all(col[i].iloc[0] == index[i])
+
+
+def test_dynamictable_region_ragged():
+ """
+ Dynamictables can also have indexes so that they are ragged arrays of column rows
+ """
+ spike_times, spike_idx = _ragged_array(24)
+ spike_times_flat = np.concatenate(spike_times)
+
+ # construct a secondary index that selects overlapping segments of the first table
+ value = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4])
+ idx = np.array([3, 6, 9])
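+    # idx marks the end of each group of three entries in value, so
+    # region[0] -> table rows [0, 1, 2], region[1] -> rows [1, 2, 3], etc.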
+
+ table = DynamicTableMixin(
+ name="table",
+ description="a table what else would it be",
+ id=np.arange(len(spike_idx)),
+ another_column=np.arange(len(spike_idx) - 1, -1, -1),
+ timeseries=spike_times_flat,
+ timeseries_index=spike_idx,
)
+ region = hdmf.DynamicTableRegion(
+ table=table,
+ value=value,
+ )
+ index = hdmf.VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx)
+ region._index = index
+
+ rows = region[1]
+ # i guess this is right?
+ # the region should be a set of three rows of the table, with a ragged array column timeseries
+ # like...
+ #
+ # id timeseries
+ # 0 1 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ...
+ # 1 2 [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, ...
+ # 2 3 [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, ...
+ assert rows.shape == (3, 2)
+ assert all(rows.index.to_numpy() == [1, 2, 3])
+ assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())])
+
+ rows = region[0:2]
+ for i in range(2):
+ assert all(
+ [all(row[1].timeseries == i) for i, row in zip(range(i, i + 3), rows[i].iterrows())]
+ )
+
+ # also works when used as a column in a table
+ class AnotherTable(DynamicTableMixin):
+ region: hdmf.DynamicTableRegion
+ yet_another_col: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+ inst2 = AnotherTable(region=region, yet_another_col=np.arange(len(idx)))
+ row = inst2[0]
+ assert row.shape == (1, 2)
+ assert row.iloc[0, 0].equals(region[0])
+
+ rows = inst2[0:3]
+ for i, df in enumerate(rows.iloc[:, 0]):
+ assert df.equals(region[i])
+
+
+def test_aligned_dynamictable_indexing(aligned_table):
+ """
+ Should be able to index aligned dynamic tables to yield a multi-index df
+ """
+ AlignedTable, tables = aligned_table
+ atable = AlignedTable(**tables)
+
+ row = atable[0]
+ assert all(
+ row.columns
+ == pd.MultiIndex.from_tuples(
+ [
+ ("table1", "index"),
+ ("table1", "col1"),
+ ("table1", "col2"),
+ ("table2", "index"),
+ ("table2", "col3"),
+ ("table2", "col4"),
+ ("table3", "index"),
+ ("table3", "col5"),
+ ("table3", "col6"),
+ ]
+ )
+ )
+ for i in range(len(atable)):
+ vals = atable[i]
+ assert vals.shape == (1, 9)
+ assert all(vals == i)
+
+ # mildly different, indexing with a slice.
+ rows = atable[0:3]
+ for i, row in enumerate(rows.iterrows()):
+ vals = row[1]
+ assert len(vals) == 9
+ assert all(vals == i)
+
+ # index just a single table
+ row = atable[0:3, "table3"]
+ assert all(row.columns.to_numpy() == ["col5", "col6"])
+ assert row.shape == (3, 2)
+
+ # index out of order
+ rows = atable[np.array([0, 2, 1])]
+ assert all(rows.iloc[:, 0] == [0, 2, 1])
+
+
+def test_mixed_aligned_dynamictable(aligned_table):
+ """
+ Aligned dynamictable should also accept vectordata/vector index pairs
+ """
+
+ AlignedTable, cols = aligned_table
+ value_array, index_array = _ragged_array(10)
+    value_array = np.concatenate(value_array)
+
+ data = hdmf.VectorData(value=value_array)
+ index = hdmf.VectorIndex(value=index_array)
+
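+    # the "_index" suffix pairs the index with the column by naming convention,
+    # yielding a single ragged column on the aligned table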
+ atable = AlignedTable(**cols, extra_col=data, extra_col_index=index)
+    assert atable[0].columns[-1] == ("extra_col", "extra_col")
+
+ for i, row in enumerate(atable[:].extra_col.iterrows()):
+ array = row[1].iloc[0]
+ assert all(array == i)
+ if i > 0:
+ assert len(array) == index_array[i] - index_array[i - 1]
+ else:
+ assert len(array) == index_array[i]
+
+
+def test_timeseriesreferencevectordata_index():
+ """
+    TimeSeriesReferenceVectorData should select windows of its referenced
+    timeseries using idx_start and count
+ """
+ generator = np.random.default_rng()
+ timeseries = np.array([np.arange(100)] * 10)
+
+ counts = generator.integers(1, 10, (10,))
+ idx_start = np.arange(0, 100, 10)
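+    # row i selects counts[i] samples from timeseries[i] starting at idx_start[i]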
+
+ response = hdmf.TimeSeriesReferenceVectorData(
+ idx_start=idx_start,
+ count=counts,
+ timeseries=timeseries,
+ )
+ for i in range(len(counts)):
+ assert len(response[i]) == counts[i]
+ items = response[3:5]
+ assert all(items[0] == timeseries[3][idx_start[3] : idx_start[3] + counts[3]])
+ assert all(items[1] == timeseries[4][idx_start[4] : idx_start[4] + counts[4]])
+
+ response[0] = np.zeros((counts[0],))
+ assert all(response[0] == 0)
+
+ response[1:3] = [np.zeros((counts[1],)), np.ones((counts[2],))]
+ assert all(response[1] == 0)
+ assert all(response[2] == 1)
+
+
+# --------------------------------------------------
+# Model-based tests
+# --------------------------------------------------
+
+
+def test_dynamictable_indexing_electricalseries(electrical_series):
+ """
+ Can index values from a dynamictable
+ """
+ series, electrodes = electrical_series
+
+ colnames = [
+ "x",
+ "y",
+ "group",
+ "group_name",
+ "location",
+ "extra_column",
+ ]
+ dtypes = [
+ np.dtype("float64"),
+ np.dtype("float64"),
+ ] + ([np.dtype("O")] * 4)
+
+ row = electrodes[0]
+ # successfully get a single row :)
+ assert row.shape == (1, 6)
+ assert row.dtypes.values.tolist() == dtypes
+ assert row.columns.tolist() == colnames
+
+ # slice a range of rows
+ rows = electrodes[0:3]
+ assert rows.shape == (3, 6)
+ assert rows.dtypes.values.tolist() == dtypes
+ assert rows.columns.tolist() == colnames
+
+ # get a single column
+ col = electrodes["y"]
+ assert all(col.value == [5, 6, 7, 8, 9])
+
+ # get a single cell
+ val = electrodes[0, "y"]
+ assert val == 5
+ val = electrodes[0, 1]
+ assert val == 5
+
+ # get a slice of rows and columns
+ subsection = electrodes[0:3, 0:3]
+ assert subsection.shape == (3, 3)
+ assert subsection.columns.tolist() == colnames[0:3]
+ assert subsection.dtypes.values.tolist() == dtypes[0:3]
+
+
+def test_dynamictable_ragged_units(units):
+ """
+ Should be able to index ragged arrays using an implicit _index column
+
+ Also tests:
+ - passing arrays directly instead of wrapping in vectordata/index specifically,
+ if the models in the fixture instantiate then this works
+ """
+ units, spike_times, spike_idx = units
+
+ # ensure we don't pivot to long when indexing
+ assert units[0].shape[0] == 1
+    # check that we got the indexing boundaries correct
+    # (and that we are forwarding attr calls to the dataframe by accessing shape)
+ for i in range(units.shape[0]):
+ assert np.all(units.iloc[i, 0] == spike_times[i])
+
+
+def test_dynamictable_region_basic_electricalseries(electrical_series):
+ """
+ DynamicTableRegion should be able to refer to a row or rows of another table
+ itself as a column within a table
+ """
+ series, electrodes = electrical_series
+ row = series.electrodes[0]
+ # check that we correctly got the 4th row instead of the 0th row,
+ # since the indexed table was constructed with inverted indexes because it's a test, ya dummy.
+ # we will only vaguely check the basic functionality here bc
+ # a) the indexing behavior of the indexed objects is tested above, and
+ # b) every other object in the chain is strictly validated,
+ # so we assume if we got a right shaped df that it is the correct one.
+ # feel free to @ me when i am wrong about this
+ assert all(row.index == 4)
+ assert row.shape == (1, 6)
+ # and we should still be preserving the model that is the contents of the cell of this row
+ # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup
+ # objects and that's as far as we are going to chase the recursion in this basic indexing test
+ # ElectrodeGroup is strictly validating so an instance check is all we need.
+ assert isinstance(row.group.values[0], ElectrodeGroup)
+
+ # getting a list of table rows is actually correct behavior here because
+ # this list of table rows is actually the cell of another table
+ rows = series.electrodes[0:3]
+ assert all([all(row.index == idx) for row, idx in zip(rows, [4, 3, 2])])
+
+
+def test_aligned_dynamictable_ictable(intracellular_recordings_table):
+ """
+ Multiple aligned dynamictables should be indexable with a multiindex
+ """
+ # can get a single row.. (check correctness below)
+ row = intracellular_recordings_table[0]
+ # can get a single table with its name
+ stimuli = intracellular_recordings_table["stimuli"]
+ assert stimuli.shape == (10, 1)
+
+ # nab a few rows to make the dataframe
+ rows = intracellular_recordings_table[0:3]
+ assert all(
+ rows.columns
+ == pd.MultiIndex.from_tuples(
+ [
+ ("electrodes", "index"),
+ ("electrodes", "electrode"),
+ ("stimuli", "index"),
+ ("stimuli", "stimulus"),
+ ("responses", "index"),
+ ("responses", "response"),
+ ]
+ )
+ )
+
+ # ensure that we get the actual values from the TimeSeriesReferenceVectorData
+ # also tested separately
+ # each individual cell should be an array of VoltageClampStimulusSeries...
+ # and then we should be able to index within that as well
+ stims = rows["stimuli", "stimulus"]
+ for i in range(len(stims)):
+ assert all(np.array(stims[i]) == i)
diff --git a/nwb_linkml/tests/test_logging.py b/nwb_linkml/tests/test_logging.py
new file mode 100644
index 0000000..ddabac1
--- /dev/null
+++ b/nwb_linkml/tests/test_logging.py
@@ -0,0 +1,35 @@
+from pathlib import Path
+
+from nwb_linkml.logging import init_logger
+
+
+def test_init_logger(capsys, tmp_path):
+ """
+ We should be able to
+ - log to file and stdout
+ - with separable levels
+ """
+
+ log_dir = Path(tmp_path) / "logs"
+ log_dir.mkdir()
+ log_file = log_dir / "nwb_linkml.test_logger.log"
+ logger = init_logger(name="test_logger", log_dir=log_dir, level="INFO", file_level="WARNING")
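+    # console handler is set to INFO, file handler to WARNING, so INFO records
+    # should reach stdout but not the file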
+ warn_msg = "Both loggers should show"
+ logger.warning(warn_msg)
+
+ # can't test for presence of string because logger can split lines depending on size of console
+ # but there should be one WARNING in stdout
+ captured = capsys.readouterr()
+ assert "WARNING" in captured.out
+
+ with open(log_file) as lfile:
+ log_str = lfile.read()
+ assert "WARNING" in log_str
+
+ info_msg = "Now only stdout should show"
+ logger.info(info_msg)
+ captured = capsys.readouterr()
+ assert "INFO" in captured.out
+ with open(log_file) as lfile:
+ log_str = lfile.read()
+ assert "INFO" not in log_str
diff --git a/nwb_linkml/tests/test_maps/test_dtype.py b/nwb_linkml/tests/test_maps/test_dtype.py
deleted file mode 100644
index 569b262..0000000
--- a/nwb_linkml/tests/test_maps/test_dtype.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import nptyping
-import numpy as np
-
-from nwb_linkml.maps.dtype import struct_from_dtype
-
-
-def test_struct_from_dtype():
- # Super weak test with fixed values, will expand with parameterize if needed
- np_dtype = np.dtype([("name1", "int32"), ("name2", "object"), ("name3", "str")])
- struct = struct_from_dtype(np_dtype)
- assert struct == nptyping.Structure["name1: Int32, name2: Object, name3: Unicode"]
diff --git a/nwb_linkml/tests/test_maps/test_hdmf.py b/nwb_linkml/tests/test_maps/test_hdmf.py
deleted file mode 100644
index b6b66dc..0000000
--- a/nwb_linkml/tests/test_maps/test_hdmf.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-
-import h5py
-import pytest
-
-from nwb_linkml.maps.hdmf import dynamictable_to_model, model_from_dynamictable
-
-NWBFILE = "/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb"
-
-
-@pytest.mark.xfail()
-@pytest.mark.parametrize("dataset", ["aibs.nwb"])
-def test_make_dynamictable(data_dir, dataset):
- nwbfile = data_dir / dataset
- h5f = h5py.File(nwbfile, "r")
- group = h5f["units"]
-
- start_time = time.time()
- model = model_from_dynamictable(group)
- data = dynamictable_to_model(group, model)
-
- _ = data.model_dump_json()
- end_time = time.time()
- total_time = end_time - start_time
diff --git a/nwb_linkml/tests/test_providers/test_provider_schema.py b/nwb_linkml/tests/test_providers/test_provider_schema.py
index e92e466..a455e29 100644
--- a/nwb_linkml/tests/test_providers/test_provider_schema.py
+++ b/nwb_linkml/tests/test_providers/test_provider_schema.py
@@ -3,9 +3,9 @@ import sys
from pathlib import Path
from typing import Optional
+import numpy as np
import pytest
-from nptyping import Shape, UByte
-from numpydantic import NDArray
+from numpydantic import NDArray, Shape
import nwb_linkml
from nwb_linkml.maps.naming import version_module_case
@@ -77,7 +77,7 @@ def test_linkml_build_from_yaml(tmp_output_dir):
"comments": Optional[str],
"data": "TimeSeriesData",
"timestamps": "Optional", # __name__ just gets the first part of Optional[TimeSeriesTimestamps]
- "control": Optional[NDArray[Shape["* num_times"], UByte]],
+ "control": Optional[NDArray[Shape["* num_times"], np.uint8]],
},
)
],
diff --git a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
index ef04312..84132d0 100644
--- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
+++ b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py
@@ -220,8 +220,8 @@ class DtypeMixin(ConfiguredBaseModel):
class Attribute(DtypeMixin):
name: str = Field(...)
- dims: Optional[List[Union[Any, str]]] = Field(default_factory=list)
- shape: Optional[List[Union[Any, int, str]]] = Field(default_factory=list)
+ dims: Optional[List[Union[Any, str]]] = Field(None)
+ shape: Optional[List[Union[Any, int, str]]] = Field(None)
value: Optional[Any] = Field(
None, description="""Optional constant, fixed value for the attribute."""
)
@@ -233,9 +233,7 @@ class Attribute(DtypeMixin):
True,
description="""Optional boolean key describing whether the attribute is required. Default value is True.""",
)
- dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
- default_factory=list
- )
+ dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None)
class Dataset(DtypeMixin):
@@ -250,8 +248,8 @@ class Dataset(DtypeMixin):
)
name: Optional[str] = Field(None)
default_name: Optional[str] = Field(None)
- dims: Optional[List[Union[Any, str]]] = Field(default_factory=list)
- shape: Optional[List[Union[Any, int, str]]] = Field(default_factory=list)
+ dims: Optional[List[Union[Any, str]]] = Field(None)
+ shape: Optional[List[Union[Any, int, str]]] = Field(None)
value: Optional[Any] = Field(
None, description="""Optional constant, fixed value for the attribute."""
)
@@ -261,7 +259,5 @@ class Dataset(DtypeMixin):
doc: str = Field(..., description="""Description of corresponding object.""")
quantity: Optional[Union[QuantityEnum, int]] = Field(1)
linkable: Optional[bool] = Field(None)
- attributes: Optional[List[Attribute]] = Field(default_factory=list)
- dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
- default_factory=list
- )
+ attributes: Optional[List[Attribute]] = Field(None)
+ dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None)
diff --git a/pyproject.toml b/pyproject.toml
index 2ea2cdc..b8723db 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,6 +58,10 @@ select = [
"D210", "D211",
# emptiness
"D419",
+ # perf
+ "PERF",
+ # numpy
+ "NPY",
]
ignore = [
# annotations for *args and **kwargs
@@ -70,6 +74,8 @@ ignore = [
"UP006", "UP035",
     # | for Union types (only supported >=3.10)
"UP007", "UP038",
+ # syntax error in forward annotation with numpydantic
+ "F722"
]
fixable = ["ALL"]
diff --git a/scripts/generate_core.py b/scripts/generate_core.py
index e711e0e..ea037f9 100644
--- a/scripts/generate_core.py
+++ b/scripts/generate_core.py
@@ -1,7 +1,9 @@
-import pdb
import shutil
import os
+import sys
import traceback
+from pdb import post_mortem
+import subprocess
from argparse import ArgumentParser
from pathlib import Path
@@ -14,48 +16,57 @@ from rich import print
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.providers import LinkMLProvider, PydanticProvider
-from nwb_linkml.providers.git import NWB_CORE_REPO, GitRepo
+from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo
from nwb_linkml.io import schema as io
-def generate_core_yaml(output_path:Path, dry_run:bool=False):
+
+def generate_core_yaml(output_path: Path, dry_run: bool = False, hdmf_only: bool = False):
"""Just build the latest version of the core schema"""
- core = io.load_nwb_core()
+ core = io.load_nwb_core(hdmf_only=hdmf_only)
built_schemas = core.build().schemas
for schema in built_schemas:
- output_file = output_path / (schema.name + '.yaml')
+ output_file = output_path / (schema.name + ".yaml")
if not dry_run:
yaml_dumper.dump(schema, output_file)
-def generate_core_pydantic(yaml_path:Path, output_path:Path, dry_run:bool=False):
+
+def generate_core_pydantic(yaml_path: Path, output_path: Path, dry_run: bool = False):
"""Just generate the latest version of the core schema"""
- for schema in yaml_path.glob('*.yaml'):
- python_name = schema.stem.replace('.', '_').replace('-', '_')
- pydantic_file = (output_path / python_name).with_suffix('.py')
+ for schema in yaml_path.glob("*.yaml"):
+ python_name = schema.stem.replace(".", "_").replace("-", "_")
+ pydantic_file = (output_path / python_name).with_suffix(".py")
generator = NWBPydanticGenerator(
str(schema),
- pydantic_version='2',
+ pydantic_version="2",
emit_metadata=True,
gen_classvars=True,
- gen_slots=True
+ gen_slots=True,
)
gen_pydantic = generator.serialize()
if not dry_run:
- with open(pydantic_file, 'w') as pfile:
+ with open(pydantic_file, "w") as pfile:
pfile.write(gen_pydantic)
-def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False):
+
+def generate_versions(
+ yaml_path: Path,
+ pydantic_path: Path,
+ dry_run: bool = False,
+ repo: GitRepo = NWB_CORE_REPO,
+    hdmf_only: bool = False,
+    pdb: bool = False,
+):
"""
Generate linkml models for all versions
"""
- repo = GitRepo(NWB_CORE_REPO)
- #repo.clone(force=True)
+ # repo.clone(force=True)
repo.clone()
# use a directory underneath this one as the temporary directory rather than
# the default hidden one
- tmp_dir = Path(__file__).parent / '__tmp__'
+ tmp_dir = Path(__file__).parent / "__tmp__"
if tmp_dir.exists():
shutil.rmtree(tmp_dir)
tmp_dir.mkdir()
@@ -66,12 +77,14 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False):
failed_versions = {}
overall_progress = Progress()
- overall_task = overall_progress.add_task('All Versions', total=len(NWB_CORE_REPO.versions))
+    overall_task = overall_progress.add_task("All Versions", total=len(repo.namespace.versions))
build_progress = Progress(
- TextColumn("[bold blue]{task.fields[name]} - [bold green]{task.fields[action]}",
- table_column=Column(ratio=1)),
- BarColumn(table_column=Column(ratio=1), bar_width=None)
+ TextColumn(
+ "[bold blue]{task.fields[name]} - [bold green]{task.fields[action]}",
+ table_column=Column(ratio=1),
+ ),
+ BarColumn(table_column=Column(ratio=1), bar_width=None),
)
panel = Panel(Group(build_progress, overall_progress))
@@ -81,45 +94,60 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False):
linkml_task = None
pydantic_task = None
- for version in NWB_CORE_REPO.versions:
+ for version in repo.namespace.versions:
# build linkml
try:
# check out the version (this should also refresh the hdmf-common schema)
- linkml_task = build_progress.add_task('', name=version, action='Checkout Version', total=3)
+ linkml_task = build_progress.add_task(
+ "", name=version, action="Checkout Version", total=3
+ )
repo.tag = version
build_progress.update(linkml_task, advance=1, action="Load Namespaces")
- # first load the core namespace
- core_ns = io.load_namespace_adapter(repo.namespace_file)
- # then the hdmf-common namespace
- hdmf_common_ns = io.load_namespace_adapter(repo.temp_directory / 'hdmf-common-schema' / 'common' / 'namespace.yaml')
- core_ns.imported.append(hdmf_common_ns)
- build_progress.update(linkml_task, advance=1, action="Build LinkML")
+ if repo.namespace == NWB_CORE_REPO:
+ # first load HDMF common
+ hdmf_common_ns = io.load_namespace_adapter(
+ repo.temp_directory / "hdmf-common-schema" / "common" / "namespace.yaml"
+ )
+ # then load nwb core
+ core_ns = io.load_namespace_adapter(
+ repo.namespace_file, imported=[hdmf_common_ns]
+ )
+ else:
+ # otherwise just load HDMF
+ core_ns = io.load_namespace_adapter(repo.namespace_file)
+
+ build_progress.update(linkml_task, advance=1, action="Build LinkML")
linkml_res = linkml_provider.build(core_ns)
build_progress.update(linkml_task, advance=1, action="Built LinkML")
# build pydantic
- ns_files = [res['namespace'] for res in linkml_res.values()]
+ ns_files = [res.namespace for res in linkml_res.values()]
- pydantic_task = build_progress.add_task('', name=version, action='', total=len(ns_files))
+ pydantic_task = build_progress.add_task(
+ "", name=version, action="", total=len(ns_files)
+ )
for schema in ns_files:
- pbar_string = ' - '.join([schema.parts[-3], schema.parts[-2], schema.parts[-1]])
+ pbar_string = schema.parts[-3]
build_progress.update(pydantic_task, action=pbar_string)
pydantic_provider.build(schema, versions=core_ns.versions, split=True)
build_progress.update(pydantic_task, advance=1)
- build_progress.update(pydantic_task, action='Built Pydantic')
-
-
+ build_progress.update(pydantic_task, action="Built Pydantic")
except Exception as e:
+ if pdb:
+ live.stop()
+ post_mortem()
+ sys.exit(1)
+
build_progress.stop_task(linkml_task)
if linkml_task is not None:
- build_progress.update(linkml_task, action='[bold red]LinkML Build Failed')
+ build_progress.update(linkml_task, action="[bold red]LinkML Build Failed")
build_progress.stop_task(linkml_task)
if pydantic_task is not None:
- build_progress.update(pydantic_task, action='[bold red]LinkML Build Failed')
+            build_progress.update(pydantic_task, action="[bold red]Pydantic Build Failed")
build_progress.stop_task(pydantic_task)
failed_versions[version] = traceback.format_exception(e)
@@ -129,74 +157,90 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False):
pydantic_task = None
if not dry_run:
- shutil.rmtree(yaml_path / 'linkml')
- shutil.rmtree(pydantic_path / 'pydantic')
- shutil.move(tmp_dir / 'linkml', yaml_path)
- shutil.move(tmp_dir / 'pydantic', pydantic_path)
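+        # when building only hdmf, swap in just the hdmf namespaces and leave
+        # the generated core models in place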
+ if hdmf_only:
+ shutil.rmtree(yaml_path / "linkml" / "hdmf_common")
+ shutil.rmtree(yaml_path / "linkml" / "hdmf_experimental")
+ shutil.rmtree(pydantic_path / "pydantic" / "hdmf_common")
+ shutil.rmtree(pydantic_path / "pydantic" / "hdmf_experimental")
+ shutil.move(tmp_dir / "linkml" / "hdmf_common", yaml_path / "linkml")
+ shutil.move(tmp_dir / "linkml" / "hdmf_experimental", yaml_path / "linkml")
+ shutil.move(tmp_dir / "pydantic" / "hdmf_common", pydantic_path / "pydantic")
+ shutil.move(tmp_dir / "pydantic" / "hdmf_experimental", pydantic_path / "pydantic")
+ else:
+ shutil.rmtree(yaml_path / "linkml")
+ shutil.rmtree(pydantic_path / "pydantic")
+ shutil.move(tmp_dir / "linkml", yaml_path)
+ shutil.move(tmp_dir / "pydantic", pydantic_path)
     # import the most recent version of the schemas we built
- latest_version = sorted((pydantic_path / 'pydantic' / 'core').iterdir(), key=os.path.getmtime)[-1]
+ latest_version = sorted(
+ (pydantic_path / "pydantic" / "core").glob("v*"), key=os.path.getmtime
+ )[-1]
# make inits to use the schema! we don't usually do this in the
# provider class because we directly import the files there.
- with open(pydantic_path / 'pydantic' / '__init__.py', 'w') as initfile:
- initfile.write(' ')
+ with open(pydantic_path / "pydantic" / "__init__.py", "w") as initfile:
+ initfile.write(" ")
- with open(pydantic_path / '__init__.py', 'w') as initfile:
- initfile.write(f'from .pydantic.core.{latest_version.name}.namespace import *')
+ with open(pydantic_path / "__init__.py", "w") as initfile:
+ initfile.write(f"from .pydantic.core.{latest_version.name}.namespace import *")
+
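+        # format the newly generated models in place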
+ subprocess.run(["black", "."])
finally:
if len(failed_versions) > 0:
- print('Failed Building Versions:')
+ print("Failed Building Versions:")
print(failed_versions)
-
-
def parser() -> ArgumentParser:
- parser = ArgumentParser('Generate all available versions of NWB core schema')
+ parser = ArgumentParser("Generate all available versions of NWB core schema")
parser.add_argument(
- '--yaml',
+ "--yaml",
help="directory to export linkML schema to",
type=Path,
- default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'schema'
+ default=Path(__file__).parent.parent / "nwb_linkml" / "src" / "nwb_linkml" / "schema",
)
parser.add_argument(
- '--pydantic',
+ "--pydantic",
help="directory to export pydantic models",
type=Path,
- default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'models'
+ default=Path(__file__).parent.parent / "nwb_linkml" / "src" / "nwb_linkml" / "models",
)
+ parser.add_argument("--hdmf", help="Only generate the HDMF namespaces", action="store_true")
parser.add_argument(
- '--latest',
+ "--latest",
help="Only generate the latest version of the core schemas.",
- action="store_true"
+ action="store_true",
)
parser.add_argument(
- '--dry-run',
- help="Generate schema and pydantic models without moving them into the target directories, for testing purposes",
- action='store_true'
+ "--dry-run",
+ help=(
+ "Generate schema and pydantic models without moving them into the target directories,"
+ " for testing purposes"
+ ),
+ action="store_true",
)
+ parser.add_argument("--pdb", help="Launch debugger on an error", action="store_true")
return parser
def main():
args = parser().parse_args()
+ if args.hdmf:
+ repo = GitRepo(HDMF_COMMON_REPO)
+ else:
+ repo = GitRepo(NWB_CORE_REPO)
+
if not args.dry_run:
args.yaml.mkdir(exist_ok=True)
args.pydantic.mkdir(exist_ok=True)
if args.latest:
- generate_core_yaml(args.yaml, args.dry_run)
+ generate_core_yaml(args.yaml, args.dry_run, args.hdmf)
generate_core_pydantic(args.yaml, args.pydantic, args.dry_run)
else:
- generate_versions(args.yaml, args.pydantic, args.dry_run)
+ generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf, pdb=args.pdb)
+
if __name__ == "__main__":
main()
-
-
-
-
-
-
-