Merge pull request #10 from p2p-ld/nwb-loader

NWB Loader
Jonny Saunders 2024-09-11 21:24:37 -07:00 committed by GitHub
commit f94a144d75
331 changed files with 11758 additions and 3449 deletions


@@ -36,7 +36,10 @@ jobs:
            nwb_models/pyproject.toml
      - name: Install dependencies
-       run: pip install -e .[tests]
+       run: |
+         pip install -e .[tests]
+         pip install -e ../nwb_schema_language
+         pip install -e ../nwb_models
        working-directory: nwb_linkml
      - name: Run Tests


@@ -49,6 +49,10 @@ Remove monkeypatches/overrides once PRs are closed
Tests
- [ ] Ensure schemas and pydantic modules in repos are up to date

+Loading
+- [ ] Top-level containers are still a little janky, eg. how `ProcessingModule` just accepts
+  extra args rather than properly abstracting `value` as a `__getitem__(self, key) -> T:`
+
## Docs TODOs

```{todolist}
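As an aside on the `ProcessingModule` TODO in the hunk above: a minimal sketch, not part of this PR and with invented names, of the kind of `__getitem__`-based container abstraction the note asks for, where children live in an explicit `value` dict keyed by name instead of arriving as loose extra args.

```python
from typing import Dict, Generic, Optional, TypeVar

from pydantic import BaseModel

T = TypeVar("T")


class ContainerMixin(BaseModel, Generic[T]):
    """Hypothetical mixin: children are stored under ``value`` keyed by name."""

    value: Optional[Dict[str, T]] = None

    def __getitem__(self, key: str) -> T:
        """Index into the container's children rather than relying on extra fields."""
        if self.value is None:
            raise KeyError(key)
        return self.value[key]

    def __contains__(self, key: str) -> bool:
        return self.value is not None and key in self.value
```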


@@ -71,7 +71,7 @@ adapter_parser = Sybil(
doctest_parser = Sybil(
    parsers=[DocTestParser(optionflags=ELLIPSIS + NORMALIZE_WHITESPACE), PythonCodeBlockParser()],
-    patterns=["*.py"],
+    patterns=["providers/git.py"],
)

pytest_collect_file = (adapter_parser + doctest_parser).pytest()


@@ -5,7 +5,7 @@
groups = ["default", "dev", "plot", "tests"]
strategy = ["inherit_metadata"]
lock_version = "4.5.0"
-content_hash = "sha256:f219083028bd024c53bc55626c8b6088d6eb5c2ade56bd694a7a112098aa9bfc"
+content_hash = "sha256:1c297e11f6dc9e4f6b8d29df872177d2ce65bbd334c0b65aa5175dfb125c4d9f"

[[metadata.targets]]
requires_python = ">=3.10,<3.13"
@ -549,7 +549,7 @@ name = "h5py"
version = "3.11.0" version = "3.11.0"
requires_python = ">=3.8" requires_python = ">=3.8"
summary = "Read and write HDF5 files from Python" summary = "Read and write HDF5 files from Python"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"numpy>=1.17.3", "numpy>=1.17.3",
] ]
@@ -580,6 +580,26 @@ files = [
    {file = "hbreader-0.9.1.tar.gz", hash = "sha256:d2c132f8ba6276d794c66224c3297cec25c8079d0a4cf019c061611e0a3b94fa"},
]
[[package]]
name = "hdmf"
version = "3.14.3"
requires_python = ">=3.8"
summary = "A hierarchical data modeling framework for modern science data standards"
groups = ["dev", "tests"]
dependencies = [
"h5py>=2.10",
"importlib-resources; python_version < \"3.9\"",
"jsonschema>=2.6.0",
"numpy>=1.18",
"pandas>=1.0.5",
"ruamel-yaml>=0.16",
"scipy>=1.4",
]
files = [
{file = "hdmf-3.14.3-py3-none-any.whl", hash = "sha256:1417ccc0d336d535192b7a3db4c7354cbc15123f1ccb3cdd82e363308e78f9bc"},
{file = "hdmf-3.14.3.tar.gz", hash = "sha256:e9548fc7bdbb534a2750092b6b9819df2ce50e27430866c3c32061a2306271cc"},
]
[[package]]
name = "idna"
version = "3.8"
@ -751,7 +771,7 @@ name = "jsonschema"
version = "4.23.0" version = "4.23.0"
requires_python = ">=3.8" requires_python = ">=3.8"
summary = "An implementation of JSON Schema validation for Python" summary = "An implementation of JSON Schema validation for Python"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"attrs>=22.2.0", "attrs>=22.2.0",
"importlib-resources>=1.4.0; python_version < \"3.9\"", "importlib-resources>=1.4.0; python_version < \"3.9\"",
@ -770,7 +790,7 @@ name = "jsonschema-specifications"
version = "2023.12.1" version = "2023.12.1"
requires_python = ">=3.8" requires_python = ">=3.8"
summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"importlib-resources>=1.4.0; python_version < \"3.9\"", "importlib-resources>=1.4.0; python_version < \"3.9\"",
"referencing>=0.31.0", "referencing>=0.31.0",
@ -976,7 +996,7 @@ name = "networkx"
version = "3.3" version = "3.3"
requires_python = ">=3.10" requires_python = ">=3.10"
summary = "Python package for creating and manipulating graphs and networks" summary = "Python package for creating and manipulating graphs and networks"
groups = ["dev", "tests"] groups = ["default", "dev", "tests"]
files = [ files = [
{file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
{file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
@@ -984,45 +1004,36 @@ files = [
[[package]]
name = "numpy"
-version = "2.1.0"
-requires_python = ">=3.10"
+version = "1.26.4"
+requires_python = ">=3.9"
summary = "Fundamental package for array computing in Python"
-groups = ["default"]
+groups = ["default", "dev", "tests"]
files = [
{file = "numpy-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6326ab99b52fafdcdeccf602d6286191a79fe2fda0ae90573c5814cd2b0bc1b8"}, {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0937e54c09f7a9a68da6889362ddd2ff584c02d015ec92672c099b61555f8911"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
{file = "numpy-2.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:30014b234f07b5fec20f4146f69e13cfb1e33ee9a18a1879a0142fbb00d47673"}, {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
{file = "numpy-2.1.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:899da829b362ade41e1e7eccad2cf274035e1cb36ba73034946fccd4afd8606b"}, {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
{file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08801848a40aea24ce16c2ecde3b756f9ad756586fb2d13210939eb69b023f5b"}, {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
{file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:398049e237d1aae53d82a416dade04defed1a47f87d18d5bd615b6e7d7e41d1f"}, {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
{file = "numpy-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0abb3916a35d9090088a748636b2c06dc9a6542f99cd476979fb156a18192b84"}, {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
{file = "numpy-2.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e2350aea18d04832319aac0f887d5fcec1b36abd485d14f173e3e900b83e33"}, {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
{file = "numpy-2.1.0-cp310-cp310-win32.whl", hash = "sha256:f6b26e6c3b98adb648243670fddc8cab6ae17473f9dc58c51574af3e64d61211"}, {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
{file = "numpy-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:f505264735ee074250a9c78247ee8618292091d9d1fcc023290e9ac67e8f1afa"}, {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
{file = "numpy-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:76368c788ccb4f4782cf9c842b316140142b4cbf22ff8db82724e82fe1205dce"}, {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
{file = "numpy-2.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8e93a01a35be08d31ae33021e5268f157a2d60ebd643cfc15de6ab8e4722eb1"}, {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
{file = "numpy-2.1.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9523f8b46485db6939bd069b28b642fec86c30909cea90ef550373787f79530e"}, {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
{file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54139e0eb219f52f60656d163cbe67c31ede51d13236c950145473504fa208cb"}, {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
{file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebbf9fbdabed208d4ecd2e1dfd2c0741af2f876e7ae522c2537d404ca895c3"}, {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
{file = "numpy-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:378cb4f24c7d93066ee4103204f73ed046eb88f9ad5bb2275bb9fa0f6a02bd36"}, {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
{file = "numpy-2.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8f699a709120b220dfe173f79c73cb2a2cab2c0b88dd59d7b49407d032b8ebd"}, {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
{file = "numpy-2.1.0-cp311-cp311-win32.whl", hash = "sha256:ffbd6faeb190aaf2b5e9024bac9622d2ee549b7ec89ef3a9373fa35313d44e0e"}, {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
{file = "numpy-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0af3a5987f59d9c529c022c8c2a64805b339b7ef506509fba7d0556649b9714b"}, {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
{file = "numpy-2.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe76d75b345dc045acdbc006adcb197cc680754afd6c259de60d358d60c93736"}, {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
{file = "numpy-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f358ea9e47eb3c2d6eba121ab512dfff38a88db719c38d1e67349af210bc7529"}, {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
{file = "numpy-2.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:dd94ce596bda40a9618324547cfaaf6650b1a24f5390350142499aa4e34e53d1"}, {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
{file = "numpy-2.1.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b47c551c6724960479cefd7353656498b86e7232429e3a41ab83be4da1b109e8"}, {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
{file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0756a179afa766ad7cb6f036de622e8a8f16ffdd55aa31f296c870b5679d745"}, {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
{file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24003ba8ff22ea29a8c306e61d316ac74111cebf942afbf692df65509a05f111"}, {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
{file = "numpy-2.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b34fa5e3b5d6dc7e0a4243fa0f81367027cb6f4a7215a17852979634b5544ee0"},
{file = "numpy-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4f982715e65036c34897eb598d64aef15150c447be2cfc6643ec7a11af06574"},
{file = "numpy-2.1.0-cp312-cp312-win32.whl", hash = "sha256:c4cd94dfefbefec3f8b544f61286584292d740e6e9d4677769bc76b8f41deb02"},
{file = "numpy-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0cdef204199278f5c461a0bed6ed2e052998276e6d8ab2963d5b5c39a0500bc"},
{file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:15ef8b2177eeb7e37dd5ef4016f30b7659c57c2c0b57a779f1d537ff33a72c7b"},
{file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e5f0642cdf4636198a4990de7a71b693d824c56a757862230454629cf62e323d"},
{file = "numpy-2.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15976718c004466406342789f31b6673776360f3b1e3c575f25302d7e789575"},
{file = "numpy-2.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6c1de77ded79fef664d5098a66810d4d27ca0224e9051906e634b3f7ead134c2"},
{file = "numpy-2.1.0.tar.gz", hash = "sha256:7dc90da0081f7e1da49ec4e398ede6a8e9cc4f5ebe5f9e06b443ed889ee9aaa2"},
]

[[package]]
@ -1102,7 +1113,7 @@ name = "pandas"
version = "2.2.2" version = "2.2.2"
requires_python = ">=3.9" requires_python = ">=3.9"
summary = "Powerful data structures for data analysis, time series, and statistics" summary = "Powerful data structures for data analysis, time series, and statistics"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"numpy>=1.22.4; python_version < \"3.11\"", "numpy>=1.22.4; python_version < \"3.11\"",
"numpy>=1.23.2; python_version == \"3.11\"", "numpy>=1.23.2; python_version == \"3.11\"",
@@ -1350,6 +1361,24 @@ files = [
    {file = "PyJSG-0.11.10.tar.gz", hash = "sha256:4bd6e3ff2833fa2b395bbe803a2d72a5f0bab5b7285bccd0da1a1bc0aee88bfa"},
]
[[package]]
name = "pynwb"
version = "2.8.1"
requires_python = ">=3.8"
summary = "Package for working with Neurodata stored in the NWB format."
groups = ["dev", "tests"]
dependencies = [
"h5py>=2.10",
"hdmf>=3.14.0",
"numpy<2.0,>=1.18",
"pandas>=1.1.5",
"python-dateutil>=2.7.3",
]
files = [
{file = "pynwb-2.8.1-py3-none-any.whl", hash = "sha256:f3c392652b26396e135cf6f1abd570d413c9eb7bf5bdb1a89d899852338fdf6c"},
{file = "pynwb-2.8.1.tar.gz", hash = "sha256:498e4bc46a7b0a1331a0f754bac72ea7f9d10d1bba35af3c7be78a61bb1d104b"},
]
[[package]]
name = "pyparsing"
version = "3.1.4"
@ -1469,7 +1498,7 @@ name = "python-dateutil"
version = "2.9.0.post0" version = "2.9.0.post0"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
summary = "Extensions to the standard Python datetime module" summary = "Extensions to the standard Python datetime module"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"six>=1.5", "six>=1.5",
] ]
@@ -1506,7 +1535,7 @@ files = [
name = "pytz"
version = "2024.1"
summary = "World timezone definitions, modern and historical"
-groups = ["default"]
+groups = ["default", "dev", "tests"]
files = [
    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
@ -1597,7 +1626,7 @@ name = "referencing"
version = "0.35.1" version = "0.35.1"
requires_python = ">=3.8" requires_python = ">=3.8"
summary = "JSON Referencing + Python" summary = "JSON Referencing + Python"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"attrs>=22.2.0", "attrs>=22.2.0",
"rpds-py>=0.7.0", "rpds-py>=0.7.0",
@ -1701,7 +1730,7 @@ name = "rpds-py"
version = "0.20.0" version = "0.20.0"
requires_python = ">=3.8" requires_python = ">=3.8"
summary = "Python bindings to Rust's persistent data structures (rpds)" summary = "Python bindings to Rust's persistent data structures (rpds)"
groups = ["default"] groups = ["default", "dev", "tests"]
files = [ files = [
{file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
{file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
@ -1762,7 +1791,7 @@ name = "ruamel-yaml"
version = "0.18.6" version = "0.18.6"
requires_python = ">=3.7" requires_python = ">=3.7"
summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
groups = ["default"] groups = ["default", "dev", "tests"]
dependencies = [ dependencies = [
"ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"", "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"",
] ]
@ -1776,7 +1805,7 @@ name = "ruamel-yaml-clib"
version = "0.2.8" version = "0.2.8"
requires_python = ">=3.6" requires_python = ">=3.6"
summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
groups = ["default"] groups = ["default", "dev", "tests"]
marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""
files = [ files = [
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
@@ -1833,6 +1862,43 @@ files = [
    {file = "ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be"},
]
[[package]]
name = "scipy"
version = "1.14.1"
requires_python = ">=3.10"
summary = "Fundamental algorithms for scientific computing in Python"
groups = ["dev", "tests"]
dependencies = [
"numpy<2.3,>=1.23.5",
]
files = [
{file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"},
{file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"},
{file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"},
{file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"},
{file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"},
{file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"},
{file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"},
{file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"},
{file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"},
{file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"},
{file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"},
{file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"},
{file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"},
{file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"},
{file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"},
{file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"},
{file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"},
{file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"},
{file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"},
{file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"},
{file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"},
{file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"},
{file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"},
{file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"},
{file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"},
]
[[package]]
name = "setuptools"
version = "74.0.0"
@ -2023,7 +2089,7 @@ name = "tzdata"
version = "2024.1" version = "2024.1"
requires_python = ">=2" requires_python = ">=2"
summary = "Provider of IANA time zone data" summary = "Provider of IANA time zone data"
groups = ["default"] groups = ["default", "dev", "tests"]
files = [ files = [
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},


@ -9,7 +9,7 @@ license = {text = "AGPL-3.0"}
readme = "README.md" readme = "README.md"
requires-python = "<3.13,>=3.10" requires-python = "<3.13,>=3.10"
dependencies = [ dependencies = [
"nwb-models>=0.1.0", "nwb-models>=0.2.0",
"pyyaml>=6.0", "pyyaml>=6.0",
"linkml-runtime>=1.7.7", "linkml-runtime>=1.7.7",
"nwb-schema-language>=0.1.3", "nwb-schema-language>=0.1.3",
@@ -22,9 +22,10 @@ dependencies = [
    "pydantic-settings>=2.0.3",
    "tqdm>=4.66.1",
    'typing-extensions>=4.12.2;python_version<"3.11"',
-    "numpydantic>=1.3.3",
+    "numpydantic>=1.5.0",
    "black>=24.4.2",
    "pandas>=2.2.2",
+    "networkx>=3.3",
]
[project.urls]

@@ -44,6 +45,7 @@ tests = [
    "pytest-cov<5.0.0,>=4.1.0",
    "sybil>=6.0.3",
    "requests-cache>=1.2.1",
+    "pynwb>=2.8.1",
]
dev = [
    "nwb-linkml[tests]",


@@ -2,6 +2,7 @@
Base class for adapters
"""

+import os
import sys
from abc import abstractmethod
from dataclasses import dataclass, field
@@ -101,6 +102,19 @@ class Adapter(BaseModel):
    """Abstract base class for adapters"""

    _logger: Optional[Logger] = None
+    _debug: Optional[bool] = None
+
+    @property
+    def debug(self) -> bool:
+        """
+        Whether we are in debug mode, which adds extra metadata in generated elements.
+
+        Set explicitly via ``_debug``, or else checks for the truthiness of the
+        environment variable ``NWB_LINKML_DEBUG``
+        """
+        if self._debug is None:
+            self._debug = bool(os.environ.get("NWB_LINKML_DEBUG", False))
+        return self._debug

    @property
    def logger(self) -> Logger:
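A quick sketch, not part of the diff, of how the new debug switch might be used from a downstream script; the import path and the `my_dataset` placeholder are assumptions, only the env var name and the `cls=` keyword come from this PR.

```python
import os

# Assumption: the env var is read lazily the first time ``Adapter.debug`` is
# accessed, so it must be set before any adapters are built.
os.environ["NWB_LINKML_DEBUG"] = "1"

from nwb_linkml.adapters.dataset import DatasetAdapter  # import path assumed

adapter = DatasetAdapter(cls=my_dataset)  # ``my_dataset`` is a placeholder Dataset spec
result = adapter.build()
# with debug on, generated classes/slots carry annotations such as
# {"tag": "dataset_map", "value": "MapScalar"} identifying which map produced them
```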


@@ -10,7 +10,7 @@ from linkml_runtime.linkml_model.meta import SlotDefinition
from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.maps import Map
-from nwb_linkml.maps.dtype import handle_dtype
+from nwb_linkml.maps.dtype import handle_dtype, inlined
from nwb_schema_language import Attribute
@@ -104,6 +104,7 @@ class MapScalar(AttributeMap):
            range=handle_dtype(attr.dtype),
            description=attr.doc,
            required=attr.required,
+            inlined=inlined(attr.dtype),
            **cls.handle_defaults(attr),
        )
        return BuildResult(slots=[slot])
@@ -151,6 +152,7 @@ class MapArray(AttributeMap):
            multivalued=multivalued,
            description=attr.doc,
            required=attr.required,
+            inlined=inlined(attr.dtype),
            **expressions,
            **cls.handle_defaults(attr),
        )
@@ -171,7 +173,10 @@ class AttributeAdapter(Adapter):
        Build the slot definitions, every attribute should have a map.
        """
        map = self.match()
-        return map.apply(self.cls)
+        res = map.apply(self.cls)
+        if self.debug:  # pragma: no cover - only used in development
+            res = self._amend_debug(res, map)
+        return res

    def match(self) -> Optional[Type[AttributeMap]]:
        """

@@ -195,3 +200,13 @@
            return None
        else:
            return matches[0]
+
+    def _amend_debug(
+        self, res: BuildResult, map: Optional[Type[AttributeMap]] = None
+    ) -> BuildResult:  # pragma: no cover - only used in development
+        map_name = "None" if map is None else map.__name__
+        for cls in res.classes:
+            cls.annotations["attribute_map"] = {"tag": "attribute_map", "value": map_name}
+        for slot in res.slots:
+            slot.annotations["attribute_map"] = {"tag": "attribute_map", "value": map_name}
+        return res


@@ -92,6 +92,13 @@ class ClassAdapter(Adapter):
        # Get vanilla top-level attributes
        kwargs["attributes"].extend(self.build_attrs(self.cls))

+        if self.debug:  # pragma: no cover - only used in development
+            kwargs["annotations"] = {}
+            kwargs["annotations"]["group_adapter"] = {
+                "tag": "group_adapter",
+                "value": "container_slot",
+            }
+
        if extra_attrs is not None:
            if isinstance(extra_attrs, SlotDefinition):
                extra_attrs = [extra_attrs]
@@ -230,18 +237,23 @@
                ifabsent=f"string({name})",
                equals_string=equals_string,
                range="string",
+                identifier=True,
            )
        else:
-            name_slot = SlotDefinition(name="name", required=True, range="string")
+            name_slot = SlotDefinition(name="name", required=True, range="string", identifier=True)
        return name_slot

    def build_self_slot(self) -> SlotDefinition:
        """
        If we are a child class, we make a slot so our parent can refer to us
        """
-        return SlotDefinition(
+        slot = SlotDefinition(
            name=self._get_slot_name(),
            description=self.cls.doc,
            range=self._get_full_name(),
+            inlined=True,
            **QUANTITY_MAP[self.cls.quantity],
        )
+        if self.debug:  # pragma: no cover - only used in development
+            slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "self_slot"}
+        return slot


@@ -11,7 +11,7 @@ from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compou
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
-from nwb_linkml.maps.dtype import flat_to_linkml, handle_dtype
+from nwb_linkml.maps.dtype import flat_to_linkml, handle_dtype, inlined
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import Dataset
@@ -147,6 +147,7 @@ class MapScalarAttributes(DatasetMap):
              name:
                name: name
                ifabsent: string(starting_time)
+               identifier: true
                range: string
                required: true
                equals_string: starting_time
@@ -245,6 +246,7 @@ class MapListlike(DatasetMap):
            attributes:
              name:
                name: name
+               identifier: true
                range: string
                required: true
              value:

@@ -257,6 +259,8 @@
                range: Image
                required: true
                multivalued: true
+               inlined: true
+               inlined_as_list: true
            tree_root: true
    """
@@ -299,6 +303,8 @@
            description=cls.doc,
            required=cls.quantity not in ("*", "?"),
            annotations=[{"source_type": "reference"}],
+            inlined=True,
+            inlined_as_list=True,
        )
        res.classes[0].attributes["value"] = slot
        return res
@@ -384,13 +390,11 @@ class MapArraylike(DatasetMap):
            - ``False``
        """
-        dtype = handle_dtype(cls.dtype)
        return (
            cls.name
            and (all([cls.dims, cls.shape]) or cls.neurodata_type_inc == "VectorData")
            and not has_attrs(cls)
            and not is_compound(cls)
-            and dtype in flat_to_linkml
        )
    @classmethod

@@ -418,6 +422,7 @@
                range=handle_dtype(cls.dtype),
                description=cls.doc,
                required=cls.quantity not in ("*", "?"),
+                inlined=inlined(cls.dtype),
                **expressions,
            )
        ]
@@ -430,6 +435,10 @@ class MapArrayLikeAttributes(DatasetMap):
    The most general case - treat everything that isn't handled by one of the special cases
    as an array!

+    We specifically include classes that have no attributes but also don't have a name,
+    as they still require their own class (unlike :class:`.MapArrayLike` above, where we
+    just generate an anonymous slot.)
+
    Examples:

        .. adapter:: DatasetAdapter

@@ -478,6 +487,7 @@
            attributes:
              name:
                name: name
+               identifier: true
                range: string
                required: true
              resolution:
@@ -525,7 +535,7 @@ class MapArrayLikeAttributes(DatasetMap):
        return (
            all([cls.dims, cls.shape])
            and cls.neurodata_type_inc != "VectorData"
-            and has_attrs(cls)
+            and (has_attrs(cls) or not cls.name)
            and not is_compound(cls)
            and (dtype == "AnyType" or dtype in flat_to_linkml)
        )
@@ -540,7 +550,9 @@
        array_adapter = ArrayAdapter(cls.dims, cls.shape)
        expressions = array_adapter.make_slot()
        # make a slot for the arraylike class
-        array_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), **expressions)
+        array_slot = SlotDefinition(
+            name="value", range=handle_dtype(cls.dtype), inlined=inlined(cls.dtype), **expressions
+        )
        res.classes[0].attributes.update({"value": array_slot})
        return res
@@ -579,6 +591,7 @@ class MapClassRange(DatasetMap):
            description=cls.doc,
            range=f"{cls.neurodata_type_inc}",
            annotations=[{"named": True}, {"source_type": "neurodata_type_inc"}],
+            inlined=True,
            **QUANTITY_MAP[cls.quantity],
        )
        res = BuildResult(slots=[this_slot])

@@ -590,102 +603,6 @@
# --------------------------------------------------
class MapVectorClassRange(DatasetMap):
"""
Map a ``VectorData`` class that is a reference to another class as simply
a multivalued slot range, rather than an independent class
"""
@classmethod
def check(c, cls: Dataset) -> bool:
"""
Check that we are a VectorData object without any additional attributes
with a dtype that refers to another class
"""
dtype = handle_dtype(cls.dtype)
return (
cls.neurodata_type_inc == "VectorData"
and cls.name
and not has_attrs(cls)
and not (cls.shape or cls.dims)
and not is_compound(cls)
and dtype not in flat_to_linkml
)
@classmethod
def apply(
c, cls: Dataset, res: Optional[BuildResult] = None, name: Optional[str] = None
) -> BuildResult:
"""
Create a slot that replaces the base class just as a list[ClassRef]
"""
this_slot = SlotDefinition(
name=cls.name,
description=cls.doc,
multivalued=True,
range=handle_dtype(cls.dtype),
required=cls.quantity not in ("*", "?"),
)
res = BuildResult(slots=[this_slot])
return res
#
# class Map1DVector(DatasetMap):
# """
# ``VectorData`` is subclassed with a name but without dims or attributes,
# treat this as a normal 1D array slot that replaces any class that would be built for this
#
# eg. all the datasets in epoch.TimeIntervals:
#
# .. code-block:: yaml
#
# groups:
# - neurodata_type_def: TimeIntervals
# neurodata_type_inc: DynamicTable
# doc: A container for aggregating epoch data and the TimeSeries that each epoch applies
# to.
# datasets:
# - name: start_time
# neurodata_type_inc: VectorData
# dtype: float32
# doc: Start time of epoch, in seconds.
#
# """
#
# @classmethod
# def check(c, cls: Dataset) -> bool:
# """
# Check that we're a 1d VectorData class
# """
# return (
# cls.neurodata_type_inc == "VectorData"
# and not cls.dims
# and not cls.shape
# and not cls.attributes
# and not cls.neurodata_type_def
# and not is_compound(cls)
# and cls.name
# )
#
# @classmethod
# def apply(
# c, cls: Dataset, res: Optional[BuildResult] = None, name: Optional[str] = None
# ) -> BuildResult:
# """
# Return a simple multivalued slot
# """
# this_slot = SlotDefinition(
# name=cls.name,
# description=cls.doc,
# range=handle_dtype(cls.dtype),
# multivalued=True,
# )
# # No need to make a class for us, so we replace the existing build results
# res = BuildResult(slots=[this_slot])
# return res
class MapNVectors(DatasetMap):
    """
    An unnamed container that indicates an arbitrary quantity of some other neurodata type.
@@ -795,6 +712,7 @@ class MapCompoundDtype(DatasetMap):
                description=a_dtype.doc,
                range=handle_dtype(a_dtype.dtype),
                array=ArrayExpression(exact_number_dimensions=1),
+                inlined=inlined(a_dtype.dtype),
                **QUANTITY_MAP[cls.quantity],
            )
        res.classes[0].attributes.update(slots)
@@ -826,6 +744,8 @@ class DatasetAdapter(ClassAdapter):
        if map is not None:
            res = map.apply(self.cls, res, self._get_full_name())

+        if self.debug:  # pragma: no cover - only used in development
+            res = self._amend_debug(res, map)
+
        return res

    def match(self) -> Optional[Type[DatasetMap]]:

@@ -850,3 +770,13 @@
            return None
        else:
            return matches[0]
+
+    def _amend_debug(
+        self, res: BuildResult, map: Optional[Type[DatasetMap]] = None
+    ) -> BuildResult:  # pragma: no cover - only used in development
+        map_name = "None" if map is None else map.__name__
+        for cls in res.classes:
+            cls.annotations["dataset_map"] = {"tag": "dataset_map", "value": map_name}
+        for slot in res.slots:
+            slot.annotations["dataset_map"] = {"tag": "dataset_map", "value": map_name}
+        return res


@@ -68,11 +68,17 @@ class GroupAdapter(ClassAdapter):
        if not self.cls.links:
            return []

+        annotations = [{"tag": "source_type", "value": "link"}]
+        if self.debug:  # pragma: no cover - only used in development
+            annotations.append({"tag": "group_adapter", "value": "link"})
+
        slots = [
            SlotDefinition(
                name=link.name,
                any_of=[{"range": link.target_type}, {"range": "string"}],
-                annotations=[{"tag": "source_type", "value": "link"}],
+                annotations=annotations,
+                inlined=True,
                **QUANTITY_MAP[link.quantity],
            )
            for link in self.cls.links
@@ -111,6 +117,9 @@
            inlined_as_list=False,
        )

+        if self.debug:  # pragma: no cover - only used in development
+            slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_group"}
+
        if self.parent is not None:
            # if we have a parent,
            # just return the slot itself without the class
@@ -144,16 +153,19 @@
        """
        name = camel_to_snake(self.cls.neurodata_type_inc) if not self.cls.name else cls.name

-        return BuildResult(
-            slots=[
-                SlotDefinition(
-                    name=name,
-                    range=self.cls.neurodata_type_inc,
-                    description=self.cls.doc,
-                    **QUANTITY_MAP[cls.quantity],
-                )
-            ]
-        )
+        slot = SlotDefinition(
+            name=name,
+            range=self.cls.neurodata_type_inc,
+            description=self.cls.doc,
+            inlined=True,
+            inlined_as_list=False,
+            **QUANTITY_MAP[cls.quantity],
+        )
+        if self.debug:  # pragma: no cover - only used in development
+            slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_slot"}
+        return BuildResult(slots=[slot])
def build_subclasses(self) -> BuildResult: def build_subclasses(self) -> BuildResult:
""" """
@ -166,20 +178,9 @@ class GroupAdapter(ClassAdapter):
# for creating slots vs. classes is handled by the adapter class # for creating slots vs. classes is handled by the adapter class
dataset_res = BuildResult() dataset_res = BuildResult()
for dset in self.cls.datasets: for dset in self.cls.datasets:
# if dset.name == 'timestamps':
# pdb.set_trace()
dset_adapter = DatasetAdapter(cls=dset, parent=self) dset_adapter = DatasetAdapter(cls=dset, parent=self)
dataset_res += dset_adapter.build() dataset_res += dset_adapter.build()
# Actually i'm not sure we have to special case this, we could handle it in
# i/o instead
# Groups are a bit more complicated because they can also behave like
# range declarations:
# eg. a group can have multiple groups with `neurodata_type_inc`, no name,
# and quantity of *,
# the group can then contain any number of groups of those included types as direct children
group_res = BuildResult() group_res = BuildResult()
for group in self.cls.groups: for group in self.cls.groups:
@ -190,6 +191,33 @@ class GroupAdapter(ClassAdapter):
return res return res
def build_self_slot(self) -> SlotDefinition:
"""
If we are a child class, we make a slot so our parent can refer to us
Groups are a bit more complicated because they can also behave like
range declarations:
eg. a group can have multiple groups with `neurodata_type_inc`, no name,
and quantity of *,
the group can then contain any number of groups of those included types as direct children
We make sure that we're inlined as a dict so our parent class can refer to us like::
parent.{slot_name}[{name}] = self
"""
slot = SlotDefinition(
name=self._get_slot_name(),
description=self.cls.doc,
range=self._get_full_name(),
inlined=True,
inlined_as_list=True,
**QUANTITY_MAP[self.cls.quantity],
)
if self.debug: # pragma: no cover - only used in development
slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_slot"}
return slot
def _check_if_container(self, group: Group) -> bool: def _check_if_container(self, group: Group) -> bool:
""" """
Check if a given subgroup is a container subgroup, Check if a given subgroup is a container subgroup,
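To make the "self slot" idea above concrete, a toy sketch with invented class names (not the generated models themselves): the child group is exposed on its parent under the snake_cased slot name produced by `build_self_slot()`, and its `name` slot, now an identifier, is what consumers key on.

```python
from typing import Optional

from pydantic import BaseModel


class Position(BaseModel):          # stand-in for a generated child group class
    name: str


class ProcessingModule(BaseModel):  # stand-in for a generated parent group class
    name: str
    position: Optional[Position] = None  # slot like the one build_self_slot() emits


mod = ProcessingModule(name="behavior", position=Position(name="Position"))
assert mod.position.name == "Position"
```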


@@ -48,7 +48,16 @@ class NamespacesAdapter(Adapter):
        need_imports = []
        for needed in ns_adapter.needed_imports.values():
-            need_imports.extend([n for n in needed if n not in ns_adapter.needed_imports])
+            # try to locate imports implied by the namespace schema,
+            # but are either not provided by the current namespace
+            # or are otherwise already provided in `imported` by the loader function
+            need_imports.extend(
+                [
+                    n
+                    for n in needed
+                    if n not in ns_adapter.needed_imports and n not in ns_adapter.versions
+                ]
+            )

        for needed in need_imports:
            if needed in DEFAULT_REPOS:

@@ -56,6 +65,8 @@ class NamespacesAdapter(Adapter):
                needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns)
                ns_adapter.imported.append(needed_adapter)

+        ns_adapter.populate_imports()
+
        return ns_adapter

    def build(

@@ -176,7 +187,6 @@
        else:
            raise KeyError(f"No schema found that define {name}")

-    @model_validator(mode="after")
    def populate_imports(self) -> "NamespacesAdapter":
        """
        Populate the imports that are needed for each schema file


@@ -6,11 +6,10 @@ See class and module docstrings for details :)
"""

import re
-import sys
from dataclasses import dataclass, field
from pathlib import Path
from types import ModuleType
-from typing import ClassVar, Dict, List, Optional, Tuple
+from typing import Callable, ClassVar, Dict, List, Literal, Optional, Tuple

from linkml.generators import PydanticGenerator
from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray
@@ -23,11 +22,14 @@ from linkml_runtime.linkml_model.meta import (
    SlotDefinition,
    SlotDefinitionName,
)
-from linkml_runtime.utils.compile_python import file_text
from linkml_runtime.utils.formatutils import remove_empty_items
from linkml_runtime.utils.schemaview import SchemaView

-from nwb_linkml.includes.base import BASEMODEL_GETITEM
+from nwb_linkml.includes.base import (
+    BASEMODEL_COERCE_CHILD,
+    BASEMODEL_COERCE_VALUE,
+    BASEMODEL_GETITEM,
+)
from nwb_linkml.includes.hdmf import (
    DYNAMIC_TABLE_IMPORTS,
    DYNAMIC_TABLE_INJECTS,

@@ -36,7 +38,7 @@ from nwb_linkml.includes.hdmf import (
)
from nwb_linkml.includes.types import ModelTypeString, NamedImports, NamedString, _get_name

-OPTIONAL_PATTERN = re.compile(r"Optional\[([\w\.]*)\]")
+OPTIONAL_PATTERN = re.compile(r"Optional\[(.*)\]")
@dataclass

@@ -52,6 +54,8 @@ class NWBPydanticGenerator(PydanticGenerator):
        ),
        'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
        BASEMODEL_GETITEM,
+        BASEMODEL_COERCE_VALUE,
+        BASEMODEL_COERCE_CHILD,
    )

    split: bool = True
    imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")])
@@ -66,6 +70,7 @@ class NWBPydanticGenerator(PydanticGenerator):
    emit_metadata: bool = True
    gen_classvars: bool = True
    gen_slots: bool = True
+    extra_fields: Literal["allow", "forbid", "ignore"] = "allow"

    skip_meta: ClassVar[Tuple[str]] = ("domain_of", "alias")
@@ -131,6 +136,8 @@ class NWBPydanticGenerator(PydanticGenerator):
        """Customize dynamictable behavior"""
        cls = AfterGenerateClass.inject_dynamictable(cls)
        cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv)
+        cls = AfterGenerateClass.inject_elementidentifiers(cls, sv, self._get_element_import)
+        cls = AfterGenerateClass.strip_vector_data_slots(cls, sv)
        return cls

    def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule:
@@ -204,15 +211,17 @@
            # merge injects/imports from the numpydantic array without using the merge method
            if slot.injected_classes is None:
                slot.injected_classes = NumpydanticArray.INJECTS.copy()
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                slot.injected_classes.extend(NumpydanticArray.INJECTS.copy())
-            if isinstance(slot.imports, list):
+            if isinstance(
+                slot.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                slot.imports = (
                    Imports(imports=slot.imports) + NumpydanticArray.IMPORTS.model_copy()
                )
            elif isinstance(slot.imports, Imports):
                slot.imports += NumpydanticArray.IMPORTS.model_copy()
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                slot.imports = NumpydanticArray.IMPORTS.model_copy()
        return slot
@@ -224,17 +233,20 @@
        """
        if "named" in slot.source.annotations and slot.source.annotations["named"].value:
-            slot.attribute.range = f"Named[{slot.attribute.range}]"
+            slot.attribute.range = wrap_preserving_optional(slot.attribute.range, "Named")
            named_injects = [ModelTypeString, _get_name, NamedString]
            if slot.injected_classes is None:
                slot.injected_classes = named_injects
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                slot.injected_classes.extend([ModelTypeString, _get_name, NamedString])
-            if isinstance(slot.imports, list):
+            if isinstance(
+                slot.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                slot.imports = Imports(imports=slot.imports) + NamedImports
            elif isinstance(slot.imports, Imports):
                slot.imports += NamedImports
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                slot.imports = NamedImports
        return slot
@@ -254,41 +266,57 @@
        Returns:

        """
-        if cls.cls.name in "DynamicTable":
-            cls.cls.bases = ["DynamicTableMixin"]
+        if cls.cls.name == "DynamicTable":
+            cls.cls.bases = ["DynamicTableMixin", "ConfiguredBaseModel"]

-            if cls.injected_classes is None:
+            if (
+                cls.injected_classes is None
+            ):  # pragma: no cover - for completeness, shouldn't happen
                cls.injected_classes = DYNAMIC_TABLE_INJECTS.copy()
            else:
                cls.injected_classes.extend(DYNAMIC_TABLE_INJECTS.copy())

            if isinstance(cls.imports, Imports):
                cls.imports += DYNAMIC_TABLE_IMPORTS
-            elif isinstance(cls.imports, list):
+            elif isinstance(
+                cls.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                cls.imports = Imports(imports=cls.imports) + DYNAMIC_TABLE_IMPORTS
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy()
        elif cls.cls.name == "VectorData":
-            cls.cls.bases = ["VectorDataMixin"]
+            cls.cls.bases = ["VectorDataMixin", "ConfiguredBaseModel"]
+            # make ``value`` generic on T
+            if "value" in cls.cls.attributes:
+                cls.cls.attributes["value"].range = "Optional[T]"
        elif cls.cls.name == "VectorIndex":
-            cls.cls.bases = ["VectorIndexMixin"]
+            cls.cls.bases = ["VectorIndexMixin", "ConfiguredBaseModel"]
        elif cls.cls.name == "DynamicTableRegion":
-            cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
+            cls.cls.bases = ["DynamicTableRegionMixin", "VectorData", "ConfiguredBaseModel"]
        elif cls.cls.name == "AlignedDynamicTable":
            cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]
+        elif cls.cls.name == "ElementIdentifiers":
+            cls.cls.bases = ["ElementIdentifiersMixin", "Data", "ConfiguredBaseModel"]
+            # make ``value`` generic on T
+            if "value" in cls.cls.attributes:
+                cls.cls.attributes["value"].range = "Optional[T]"
        elif cls.cls.name == "TimeSeriesReferenceVectorData":
            # in core.nwb.base, so need to inject and import again
            cls.cls.bases = ["TimeSeriesReferenceVectorDataMixin", "VectorData"]
-            if cls.injected_classes is None:
+            if (
+                cls.injected_classes is None
+            ):  # pragma: no cover - for completeness, shouldn't happen
                cls.injected_classes = TSRVD_INJECTS.copy()
            else:
                cls.injected_classes.extend(TSRVD_INJECTS.copy())
            if isinstance(cls.imports, Imports):
                cls.imports += TSRVD_IMPORTS
-            elif isinstance(cls.imports, list):
+            elif isinstance(
+                cls.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                cls.imports = Imports(imports=cls.imports) + TSRVD_IMPORTS
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                cls.imports = TSRVD_IMPORTS.model_copy()

        return cls
@@ -305,34 +333,60 @@
        ):
            for an_attr in cls.cls.attributes:
                if "NDArray" in (slot_range := cls.cls.attributes[an_attr].range):
-                    if an_attr.endswith("_index"):
-                        cls.cls.attributes[an_attr].range = "".join(
-                            ["VectorIndex[", slot_range, "]"]
-                        )
-                    else:
-                        cls.cls.attributes[an_attr].range = "".join(
-                            ["VectorData[", slot_range, "]"]
-                        )
+                    if an_attr == "id":
+                        cls.cls.attributes[an_attr].range = "ElementIdentifiers"
+                        return cls
+
+                    wrap_cls = "VectorIndex" if an_attr.endswith("_index") else "VectorData"
+
+                    cls.cls.attributes[an_attr].range = wrap_preserving_optional(
+                        slot_range, wrap_cls
+                    )
+
        return cls

+    @staticmethod
+    def inject_elementidentifiers(
+        cls: ClassResult, sv: SchemaView, import_method: Callable[[str], Import]
+    ) -> ClassResult:
+        """
+        Inject ElementIdentifiers into module that define dynamictables -
+        needed to handle ID columns
+        """
+        if (
+            cls.source.is_a == "DynamicTable"
+            or "DynamicTable" in sv.class_ancestors(cls.source.name)
+        ) and sv.schema.name != "hdmf-common.table":
+            imp = import_method("ElementIdentifiers")
+            cls.imports += [imp]
+        return cls
+
+    @staticmethod
+    def strip_vector_data_slots(cls: ClassResult, sv: SchemaView) -> ClassResult:
+        """
+        Remove spurious ``vector_data`` slots from DynamicTables
+        """
+        if "vector_data" in cls.cls.attributes:
+            del cls.cls.attributes["vector_data"]
+        return cls
-def compile_python(
-    text_or_fn: str, package_path: Path = None, module_name: str = "test"
-) -> ModuleType:
-    """
-    Compile the text or file and return the resulting module
-    @param text_or_fn: Python text or file name that references python file
-    @param package_path: Root package path. If omitted and we've got a python file,
-        the package is the containing directory
-    @return: Compiled module
-    """
-    python_txt = file_text(text_or_fn)
-    if package_path is None and python_txt != text_or_fn:
-        package_path = Path(text_or_fn)
-    spec = compile(python_txt, "<string>", "exec")
-    module = ModuleType(module_name)
-    exec(spec, module.__dict__)
-    sys.modules[module_name] = module
-    return module
+def wrap_preserving_optional(annotation: str, wrap: str) -> str:
+    """
+    Add a wrapping type to a type annotation string,
+    preserving any `Optional[]` annotation, bumping it to the outside
+
+    Examples:
+
+        >>> wrap_preserving_optional('Optional[list[str]]', 'NewType')
+        'Optional[NewType[list[str]]]'
+
+    """
+    is_optional = OPTIONAL_PATTERN.match(annotation)
+    if is_optional:
+        annotation = is_optional.groups()[0]
+        annotation = f"Optional[{wrap}[{annotation}]]"
+    else:
+        annotation = f"{wrap}[{annotation}]"
+    return annotation
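The plain case follows directly from the same code: an annotation without ``Optional[]`` is wrapped as-is, e.g. (illustrative):

    >>> wrap_preserving_optional('list[str]', 'VectorData')
    'VectorData[list[str]]'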
@ -12,3 +12,38 @@ BASEMODEL_GETITEM = """
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
""" """
BASEMODEL_COERCE_VALUE = """
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
\"\"\"Try to rescue instantiation by using the value field\"\"\"
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
"""
BASEMODEL_COERCE_CHILD = """
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
\"\"\"Recast parent classes into child classes\"\"\"
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
"""
@ -53,8 +53,11 @@ class DynamicTableMixin(BaseModel):
NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
"id", "id",
"name", "name",
"categories",
"colnames", "colnames",
"description", "description",
"hdf5_path",
"object_id",
) )
# overridden by subclass but implemented here for testing and typechecking purposes :) # overridden by subclass but implemented here for testing and typechecking purposes :)
@ -138,7 +141,7 @@ class DynamicTableMixin(BaseModel):
# cast to DF # cast to DF
if not isinstance(index, Iterable): if not isinstance(index, Iterable):
index = [index] index = [index]
index = pd.Index(data=index) index = pd.Index(data=index, name="id")
return pd.DataFrame(data, index=index) return pd.DataFrame(data, index=index)
def _slice_range( def _slice_range(
@ -246,11 +249,14 @@ class DynamicTableMixin(BaseModel):
if k not in cls.NON_COLUMN_FIELDS if k not in cls.NON_COLUMN_FIELDS
and not k.endswith("_index") and not k.endswith("_index")
and not isinstance(model[k], VectorIndexMixin) and not isinstance(model[k], VectorIndexMixin)
and model[k] is not None
] ]
model["colnames"] = colnames model["colnames"] = colnames
else: else:
# add any columns not explicitly given an order at the end # add any columns not explicitly given an order at the end
colnames = model["colnames"].copy() colnames = model["colnames"].copy()
if isinstance(colnames, np.ndarray):
colnames = colnames.tolist()
colnames.extend( colnames.extend(
[ [
k k
@ -259,6 +265,7 @@ class DynamicTableMixin(BaseModel):
and not k.endswith("_index") and not k.endswith("_index")
and k not in model["colnames"] and k not in model["colnames"]
and not isinstance(model[k], VectorIndexMixin) and not isinstance(model[k], VectorIndexMixin)
and model[k] is not None
] ]
) )
model["colnames"] = colnames model["colnames"] = colnames
@ -277,17 +284,25 @@ class DynamicTableMixin(BaseModel):
if isinstance(model, dict): if isinstance(model, dict):
for key, val in model.items(): for key, val in model.items():
if key in cls.model_fields: if key in cls.model_fields or key in cls.NON_COLUMN_FIELDS:
continue continue
if not isinstance(val, (VectorData, VectorIndex)): if not isinstance(val, (VectorData, VectorIndex)):
try: try:
if key.endswith("_index"): to_cast = VectorIndex if key.endswith("_index") else VectorData
model[key] = VectorIndex(name=key, description="", value=val) if isinstance(val, dict):
model[key] = to_cast(**val)
else: else:
model[key] = VectorData(name=key, description="", value=val) model[key] = to_cast(name=key, description="", value=val)
except ValidationError as e: # pragma: no cover except ValidationError as e: # pragma: no cover
raise ValidationError( raise ValidationError.from_exception_data(
f"field {key} cannot be cast to VectorData from {val}" title=f"field {key} cannot be cast to VectorData from {val}",
line_errors=[
{
"type": "ValueError",
"loc": ("DynamicTableMixin", "cast_extra_columns"),
"input": val,
}
],
) from e ) from e
return model return model
@ -320,9 +335,9 @@ class DynamicTableMixin(BaseModel):
""" """
Ensure that all columns are equal length Ensure that all columns are equal length
""" """
lengths = [len(v) for v in self._columns.values()] + [len(self.id)] lengths = [len(v) for v in self._columns.values() if v is not None] + [len(self.id)]
assert all([length == lengths[0] for length in lengths]), ( assert all([length == lengths[0] for length in lengths]), (
"Columns are not of equal length! " "DynamicTable columns are not of equal length! "
f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" f"Got colnames:\n{self.colnames}\nand lengths: {lengths}"
) )
return self return self
@ -370,7 +385,7 @@ class VectorDataMixin(BaseModel, Generic[T]):
# redefined in `VectorData`, but included here for testing and type checking # redefined in `VectorData`, but included here for testing and type checking
value: Optional[T] = None value: Optional[T] = None
def __init__(self, value: Optional[NDArray] = None, **kwargs): def __init__(self, value: Optional[T] = None, **kwargs):
if value is not None and "value" not in kwargs: if value is not None and "value" not in kwargs:
kwargs["value"] = value kwargs["value"] = value
super().__init__(**kwargs) super().__init__(**kwargs)
@ -571,10 +586,13 @@ class AlignedDynamicTableMixin(BaseModel):
__pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]]
NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = (
"id",
"name", "name",
"categories", "categories",
"colnames", "colnames",
"description", "description",
"hdf5_path",
"object_id",
) )
name: str = "aligned_table" name: str = "aligned_table"
@ -604,28 +622,29 @@ class AlignedDynamicTableMixin(BaseModel):
elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str):
# get a slice of a single table # get a slice of a single table
return self._categories[item[1]][item[0]] return self._categories[item[1]][item[0]]
elif isinstance(item, (int, slice, Iterable)): elif isinstance(item, (int, slice, Iterable, np.int_)):
# get a slice of all the tables # get a slice of all the tables
ids = self.id[item] ids = self.id[item]
if not isinstance(ids, Iterable): if not isinstance(ids, Iterable):
ids = pd.Series([ids]) ids = pd.Series([ids])
ids = pd.DataFrame({"id": ids}) ids = pd.Index(data=ids, name="id")
tables = [ids] tables = []
for category_name, category in self._categories.items(): for category_name, category in self._categories.items():
table = category[item] table = category[item]
if isinstance(table, pd.DataFrame): if isinstance(table, pd.DataFrame):
table = table.reset_index() table = table.reset_index()
table.index = ids
elif isinstance(table, np.ndarray): elif isinstance(table, np.ndarray):
table = pd.DataFrame({category_name: [table]}) table = pd.DataFrame({category_name: [table]}, index=ids)
elif isinstance(table, Iterable): elif isinstance(table, Iterable):
table = pd.DataFrame({category_name: table}) table = pd.DataFrame({category_name: table}, index=ids)
else: else:
raise ValueError( raise ValueError(
f"Don't know how to construct category table for {category_name}" f"Don't know how to construct category table for {category_name}"
) )
tables.append(table) tables.append(table)
names = [self.name] + self.categories # names = [self.name] + self.categories
# construct below in case we need to support array indexing in the future # construct below in case we need to support array indexing in the future
else: else:
raise ValueError( raise ValueError(
@ -633,8 +652,7 @@ class AlignedDynamicTableMixin(BaseModel):
"need an int, string, slice, ndarray, or tuple[int | slice, str]" "need an int, string, slice, ndarray, or tuple[int | slice, str]"
) )
df = pd.concat(tables, axis=1, keys=names) df = pd.concat(tables, axis=1, keys=self.categories)
df.set_index((self.name, "id"), drop=True, inplace=True)
return df return df
def __getattr__(self, item: str) -> Any: def __getattr__(self, item: str) -> Any:
@ -692,14 +710,19 @@ class AlignedDynamicTableMixin(BaseModel):
model["categories"] = categories model["categories"] = categories
else: else:
# add any columns not explicitly given an order at the end # add any columns not explicitly given an order at the end
categories = [ categories = model["categories"].copy()
if isinstance(categories, np.ndarray):
categories = categories.tolist()
categories.extend(
[
k k
for k in model for k in model
if k not in cls.NON_COLUMN_FIELDS if k not in cls.NON_CATEGORY_FIELDS
and not k.endswith("_index") and not k.endswith("_index")
and k not in model["categories"] and k not in model["categories"]
] ]
model["categories"].extend(categories) )
model["categories"] = categories
return model return model
@model_validator(mode="after") @model_validator(mode="after")
@ -733,7 +756,7 @@ class AlignedDynamicTableMixin(BaseModel):
""" """
lengths = [len(v) for v in self._categories.values()] + [len(self.id)] lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
assert all([length == lengths[0] for length in lengths]), ( assert all([length == lengths[0] for length in lengths]), (
"Columns are not of equal length! " "AlignedDynamicTableColumns are not of equal length! "
f"Got colnames:\n{self.categories}\nand lengths: {lengths}" f"Got colnames:\n{self.categories}\nand lengths: {lengths}"
) )
return self return self
@ -828,6 +851,13 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
) )
class ElementIdentifiersMixin(VectorDataMixin):
"""
Mixin class for ElementIdentifiers - allows treating them as generic
and provides the general indexing methods from VectorData
"""
DYNAMIC_TABLE_IMPORTS = Imports( DYNAMIC_TABLE_IMPORTS = Imports(
imports=[ imports=[
Import(module="pandas", alias="pd"), Import(module="pandas", alias="pd"),
@ -871,6 +901,7 @@ DYNAMIC_TABLE_INJECTS = [
DynamicTableRegionMixin, DynamicTableRegionMixin,
DynamicTableMixin, DynamicTableMixin,
AlignedDynamicTableMixin, AlignedDynamicTableMixin,
ElementIdentifiersMixin,
] ]
TSRVD_IMPORTS = Imports( TSRVD_IMPORTS = Imports(
@ -912,3 +943,8 @@ if "pytest" in sys.modules:
"""TimeSeriesReferenceVectorData subclass for testing""" """TimeSeriesReferenceVectorData subclass for testing"""
pass pass
class ElementIdentifiers(ElementIdentifiersMixin):
"""ElementIdentifiers subclass for testing"""
pass
@ -22,6 +22,7 @@ Other TODO:
import json import json
import os import os
import re
import shutil import shutil
import subprocess import subprocess
import sys import sys
@ -31,11 +32,18 @@ from types import ModuleType
from typing import TYPE_CHECKING, Dict, List, Optional, Union, overload from typing import TYPE_CHECKING, Dict, List, Optional, Union, overload
import h5py import h5py
import networkx as nx
import numpy as np import numpy as np
from numpydantic.interface.hdf5 import H5ArrayPath
from pydantic import BaseModel from pydantic import BaseModel
from tqdm import tqdm from tqdm import tqdm
from nwb_linkml.maps.hdf5 import ReadPhases, ReadQueue, flatten_hdf from nwb_linkml.maps.hdf5 import (
get_attr_references,
get_dataset_references,
get_references,
resolve_hardlink,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from nwb_linkml.providers.schema import SchemaProvider from nwb_linkml.providers.schema import SchemaProvider
@ -47,6 +55,221 @@ else:
from typing_extensions import Never from typing_extensions import Never
SKIP_PATTERN = re.compile("(^/specifications.*)|(\.specloc)")
"""Nodes to always skip in reading e.g. because they are handled elsewhere"""
def hdf_dependency_graph(h5f: Path | h5py.File | h5py.Group) -> nx.DiGraph:
"""
Directed dependency graph of dataset and group nodes in an NWBFile such that
each node ``n_i`` is connected to node ``n_j`` if
* ``n_j`` is ``n_i``'s child
* ``n_i`` contains a reference to ``n_j``
Resolve references in
* Attributes
* Dataset columns
* Compound dtypes
Edges are labeled with ``reference`` or ``child`` depending on the type of edge it is,
and attributes from the hdf5 file are added as node attributes.
Args:
h5f (:class:`pathlib.Path` | :class:`h5py.File`): NWB file to graph
Returns:
:class:`networkx.DiGraph`
"""
if isinstance(h5f, (Path, str)):
h5f = h5py.File(h5f, "r")
g = nx.DiGraph()
def _visit_item(name: str, node: h5py.Dataset | h5py.Group) -> None:
if SKIP_PATTERN.match(node.name):
return
# find references in attributes
refs = get_references(node)
# add edges from references
edges = [(node.name, ref) for ref in refs if not SKIP_PATTERN.match(ref)]
g.add_edges_from(edges, label="reference")
# add children, if group
if isinstance(node, h5py.Group):
children = [
resolve_hardlink(child)
for child in node.values()
if not SKIP_PATTERN.match(child.name)
]
edges = [(node.name, ref) for ref in children if not SKIP_PATTERN.match(ref)]
g.add_edges_from(edges, label="child")
# ensure node added to graph
if len(edges) == 0:
g.add_node(node.name)
# store attrs in node
g.nodes[node.name].update(node.attrs)
# apply to root
_visit_item(h5f.name, h5f)
h5f.visititems(_visit_item)
return g
def filter_dependency_graph(g: nx.DiGraph) -> nx.DiGraph:
"""
Remove nodes from a dependency graph if they
* have no neurodata type AND
* have no outbound edges
OR
* are a VectorIndex (which are handled by the dynamictable mixins)
"""
remove_nodes = []
node: str
for node in g.nodes:
ndtype = g.nodes[node].get("neurodata_type", None)
if (ndtype is None and g.out_degree(node) == 0) or SKIP_PATTERN.match(node):
remove_nodes.append(node)
g.remove_nodes_from(remove_nodes)
return g
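Together, these two functions give the read order used by ``HDF5IO.read`` below: build the graph, prune untyped leaf nodes, then visit nodes in reverse topological order so that referenced and child nodes are loaded before the nodes that depend on them. A rough sketch (the filename is hypothetical):

    g = filter_dependency_graph(hdf_dependency_graph("example.nwb"))
    for node in reversed(list(nx.topological_sort(g))):
        print(node, g.nodes[node].get("neurodata_type"))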
def _load_node(
path: str, h5f: h5py.File, provider: "SchemaProvider", context: dict
) -> dict | BaseModel:
"""
Load an individual node in the graph.

Args:
    path: hdf5 path of the node to load
    h5f: open :class:`h5py.File` handle
    provider: :class:`.SchemaProvider` used to resolve models
    context: dict of already-loaded nodes, keyed by hdf5 path

Returns:
    dict | BaseModel
"""
obj = h5f.get(path)
if isinstance(obj, h5py.Dataset):
args = _load_dataset(obj, h5f, context)
elif isinstance(obj, h5py.Group):
args = _load_group(obj, h5f, context)
else:
raise TypeError(f"Nodes can only be h5py Datasets and Groups, got {obj}")
if "neurodata_type" in obj.attrs:
model = provider.get_class(obj.attrs["namespace"], obj.attrs["neurodata_type"])
return model(**args)
else:
if "name" in args:
del args["name"]
if "hdf5_path" in args:
del args["hdf5_path"]
return args
def _load_dataset(
dataset: h5py.Dataset, h5f: h5py.File, context: dict
) -> Union[dict, str, int, float]:
"""
Resolves datasets that do not have a ``neurodata_type`` as a dictionary or a scalar.
If the dataset is a single value without attrs, load it and return as a scalar value.
Otherwise return a :class:`.H5ArrayPath` as a reference to the dataset in the `value` key.
"""
res = {}
if dataset.shape == ():
val = dataset[()]
if isinstance(val, h5py.h5r.Reference):
val = context.get(h5f[val].name)
# if this is just a scalar value, return it
if not dataset.attrs:
return val
res["value"] = val
elif len(dataset) > 0 and isinstance(dataset[0], h5py.h5r.Reference):
# vector of references
res["value"] = [context.get(h5f[ref].name) for ref in dataset[:]]
elif len(dataset.dtype) > 1:
# compound dataset - check if any of the fields are references
for name in dataset.dtype.names:
if isinstance(dataset[name][0], h5py.h5r.Reference):
res[name] = [context.get(h5f[ref].name) for ref in dataset[name]]
else:
res[name] = H5ArrayPath(h5f.filename, dataset.name, name)
else:
res["value"] = H5ArrayPath(h5f.filename, dataset.name)
res.update(dataset.attrs)
if "namespace" in res:
del res["namespace"]
if "neurodata_type" in res:
del res["neurodata_type"]
res["name"] = dataset.name.split("/")[-1]
res["hdf5_path"] = dataset.name
# resolve attr references
for k, v in res.items():
if isinstance(v, h5py.h5r.Reference):
ref_path = h5f[v].name
if SKIP_PATTERN.match(ref_path):
res[k] = ref_path
else:
res[k] = context[ref_path]
if len(res) == 1:
return res["value"]
else:
return res
def _load_group(group: h5py.Group, h5f: h5py.File, context: dict) -> dict:
"""
Load a group!
"""
res = {}
res.update(group.attrs)
for child_name, child in group.items():
if child.name in context:
res[child_name] = context[child.name]
elif isinstance(child, h5py.Dataset):
res[child_name] = _load_dataset(child, h5f, context)
elif isinstance(child, h5py.Group):
res[child_name] = _load_group(child, h5f, context)
else:
raise TypeError(
"Can only handle preinstantiated child objects in context, datasets, and group,"
f" got {child} for {child_name}"
)
if "namespace" in res:
del res["namespace"]
if "neurodata_type" in res:
del res["neurodata_type"]
name = group.name.split("/")[-1]
if name:
res["name"] = name
res["hdf5_path"] = group.name
# resolve attr references
for k, v in res.items():
if isinstance(v, h5py.h5r.Reference):
ref_path = h5f[v].name
if SKIP_PATTERN.match(ref_path):
res[k] = ref_path
else:
res[k] = context[ref_path]
return res
class HDF5IO: class HDF5IO:
""" """
Read (and eventually write) from an NWB HDF5 file. Read (and eventually write) from an NWB HDF5 file.
@ -106,28 +329,22 @@ class HDF5IO:
        h5f = h5py.File(str(self.path))
        src = h5f.get(path) if path else h5f
+       graph = hdf_dependency_graph(src)
+       graph = filter_dependency_graph(graph)

-       # get all children of selected item
-       if isinstance(src, (h5py.File, h5py.Group)):
-           children = flatten_hdf(src)
-       else:
-           raise NotImplementedError("directly read individual datasets")
-
-       queue = ReadQueue(h5f=self.path, queue=children, provider=provider)
-
-       # Apply initial planning phase of reading
-       queue.apply_phase(ReadPhases.plan)
-       # Read operations gather the data before casting into models
-       queue.apply_phase(ReadPhases.read)
-       # Construction operations actually cast the models
-       # this often needs to run several times as models with dependencies wait for their
-       # dependents to be cast
-       queue.apply_phase(ReadPhases.construct)
-
-       if path is None:
-           return queue.completed["/"].result
-       else:
-           return queue.completed[path].result
+       # topo sort to get read order
+       # TODO: This could be parallelized using `topological_generations`,
+       # but it's not clear what the perf bonus would be because there are many generations
+       # with few items
+       topo_order = list(reversed(list(nx.topological_sort(graph))))
+       context = {}
+       for node in topo_order:
+           res = _load_node(node, h5f, provider, context)
+           context[node] = res
+
+       if path is None:
+           path = "/"
+       return context[path]
def write(self, path: Path) -> Never: def write(self, path: Path) -> Never:
""" """
@ -167,7 +384,7 @@ class HDF5IO:
""" """
from nwb_linkml.providers.schema import SchemaProvider from nwb_linkml.providers.schema import SchemaProvider
h5f = h5py.File(str(self.path)) h5f = h5py.File(str(self.path), "r")
schema = read_specs_as_dicts(h5f.get("specifications")) schema = read_specs_as_dicts(h5f.get("specifications"))
# get versions for each namespace # get versions for each namespace
@ -269,7 +486,7 @@ def find_references(h5f: h5py.File, path: str) -> List[str]:
return references return references
def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> Path: def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> Path | None:
""" """
Create a truncated HDF5 file where only the first few samples are kept. Create a truncated HDF5 file where only the first few samples are kept.
@ -285,6 +502,14 @@ def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> P
Returns: Returns:
:class:`pathlib.Path` path of the truncated file :class:`pathlib.Path` path of the truncated file
""" """
if shutil.which("h5repack") is None:
warnings.warn(
"Truncation requires h5repack to be available, "
"or else the truncated files will be no smaller than the originals",
stacklevel=2,
)
return
target = source.parent / (source.stem + "_truncated.hdf5") if target is None else Path(target) target = source.parent / (source.stem + "_truncated.hdf5") if target is None else Path(target)
source = Path(source) source = Path(source)
@ -300,17 +525,34 @@ def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> P
os.chmod(target, 0o774) os.chmod(target, 0o774)
to_resize = [] to_resize = []
attr_refs = {}
dataset_refs = {}
def _need_resizing(name: str, obj: h5py.Dataset | h5py.Group) -> None: def _need_resizing(name: str, obj: h5py.Dataset | h5py.Group) -> None:
if isinstance(obj, h5py.Dataset) and obj.size > n: if isinstance(obj, h5py.Dataset) and obj.size > n:
to_resize.append(name) to_resize.append(name)
print("Resizing datasets...") def _find_attr_refs(name: str, obj: h5py.Dataset | h5py.Group) -> None:
"""Find all references in object attrs"""
refs = get_attr_references(obj)
if refs:
attr_refs[name] = refs
def _find_dataset_refs(name: str, obj: h5py.Dataset | h5py.Group) -> None:
"""Find all references in datasets themselves"""
refs = get_dataset_references(obj)
if refs:
dataset_refs[name] = refs
# first we get the items that need to be resized and then resize them below # first we get the items that need to be resized and then resize them below
# problems with writing to the file from within the visititems call # problems with writing to the file from within the visititems call
print("Planning resize...")
h5f_target = h5py.File(str(target), "r+") h5f_target = h5py.File(str(target), "r+")
h5f_target.visititems(_need_resizing) h5f_target.visititems(_need_resizing)
h5f_target.visititems(_find_attr_refs)
h5f_target.visititems(_find_dataset_refs)
print("Resizing datasets...")
for resize in to_resize: for resize in to_resize:
obj = h5f_target.get(resize) obj = h5f_target.get(resize)
try: try:
@ -320,10 +562,14 @@ def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> P
# so we have to copy and create a new dataset # so we have to copy and create a new dataset
tmp_name = obj.name + "__tmp" tmp_name = obj.name + "__tmp"
original_name = obj.name original_name = obj.name
obj.parent.move(obj.name, tmp_name) obj.parent.move(obj.name, tmp_name)
old_obj = obj.parent.get(tmp_name) old_obj = obj.parent.get(tmp_name)
new_obj = obj.parent.create_dataset(original_name, data=old_obj[0:n]) new_obj = obj.parent.create_dataset(
original_name, data=old_obj[0:n], dtype=old_obj.dtype
)
for k, v in old_obj.attrs.items(): for k, v in old_obj.attrs.items():
new_obj.attrs[k] = v new_obj.attrs[k] = v
del new_obj.parent[tmp_name] del new_obj.parent[tmp_name]
@ -331,16 +577,18 @@ def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> P
h5f_target.close() h5f_target.close()
# use h5repack to actually remove the items from the dataset # use h5repack to actually remove the items from the dataset
if shutil.which("h5repack") is None:
warnings.warn(
"Truncated file made, but since h5repack not found in path, file won't be any smaller",
stacklevel=2,
)
return target
print("Repacking hdf5...") print("Repacking hdf5...")
res = subprocess.run( res = subprocess.run(
["h5repack", "-f", "GZIP=9", str(target), str(target_tmp)], capture_output=True [
"h5repack",
"--verbose=2",
"--enable-error-stack",
"-f",
"GZIP=9",
str(target),
str(target_tmp),
],
capture_output=True,
) )
if res.returncode != 0: if res.returncode != 0:
warnings.warn(f"h5repack did not return 0: {res.stderr} {res.stdout}", stacklevel=2) warnings.warn(f"h5repack did not return 0: {res.stderr} {res.stdout}", stacklevel=2)
@ -348,6 +596,36 @@ def truncate_file(source: Path, target: Optional[Path] = None, n: int = 10) -> P
target_tmp.unlink() target_tmp.unlink()
return target return target
h5f_target = h5py.File(str(target_tmp), "r+")
# recreate references after repacking, because repacking ruins them if they
# are in a compound dtype
for obj_name, obj_refs in attr_refs.items():
obj = h5f_target.get(obj_name)
for attr_name, ref_target in obj_refs.items():
ref_target = h5f_target.get(ref_target)
obj.attrs[attr_name] = ref_target.ref
for obj_name, obj_refs in dataset_refs.items():
obj = h5f_target.get(obj_name)
if isinstance(obj_refs, list):
if len(obj_refs) == 1:
ref_target = h5f_target.get(obj_refs[0])
obj[()] = ref_target.ref
else:
targets = [h5f_target.get(ref).ref for ref in obj_refs[:n]]
obj[:] = targets
else:
# dict for a compound dataset
for col_name, column_refs in obj_refs.items():
targets = [h5f_target.get(ref).ref for ref in column_refs[:n]]
data = obj[:]
data[col_name] = targets
obj[:] = data
h5f_target.flush()
h5f_target.close()
target.unlink() target.unlink()
target_tmp.rename(target) target_tmp.rename(target)
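With the guard added above, ``truncate_file`` now returns ``None`` (with a warning) when ``h5repack`` is not on the ``PATH``; otherwise it returns the truncated copy. Typical use is making a small test copy of a large file, e.g. (hypothetical filename):

    small = truncate_file(Path("sub-01_ses-01_behavior.nwb"), n=10)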
@ -131,6 +131,8 @@ def load_namespace_adapter(
else: else:
adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch) adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
adapter.populate_imports()
return adapter return adapter
@ -3,7 +3,7 @@ Dtype mappings
""" """
from datetime import datetime from datetime import datetime
from typing import Any from typing import Any, Optional
import numpy as np import numpy as np
@ -160,14 +160,28 @@ def handle_dtype(dtype: DTypeType | None) -> str:
    elif isinstance(dtype, FlatDtype):
        return dtype.value
    elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype):
-       # there is precisely one class that uses compound dtypes:
-       # TimeSeriesReferenceVectorData
-       # compoundDtypes are able to define a ragged table according to the schema
-       # but are used in this single case equivalently to attributes.
-       # so we'll... uh... treat them as slots.
-       # TODO
+       # Compound Dtypes are handled by the MapCompoundDtype dataset map,
+       # but this function is also used within ``check`` methods, so we should always
+       # return something from it rather than raise
        return "AnyType"
    else:
        # flat dtype
        return dtype
def inlined(dtype: DTypeType | None) -> Optional[bool]:
"""
Check if a slot should be inlined based on its dtype.
For now this is equivalent to checking whether the dtype is a reference dtype,
but the function remains semantically reserved for answering this question w.r.t. dtype.
Returns ``None`` rather than ``False`` when not inlined, to avoid cluttering generated models with unnecessary props
"""
return (
True
if isinstance(dtype, ReferenceDtype)
or (isinstance(dtype, CompoundDtype) and isinstance(dtype.dtype, ReferenceDtype))
else None
)
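For example (constructor fields assumed from ``nwb_schema_language``), a slot whose dtype is a reference to another type is marked inlined, while plain dtypes stay unannotated:

    inlined(ReferenceDtype(target_type="ElectrodeGroup", reftype="object"))  # -> True
    inlined("int32")  # -> None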
@ -5,832 +5,47 @@ We have sort of diverged from the initial idea of a generalized map as in :class
so we will make our own mapping class here and re-evaluate whether they should be unified later so we will make our own mapping class here and re-evaluate whether they should be unified later
""" """
# FIXME: return and document whatever is left of this godforsaken module after refactoring
# ruff: noqa: D102 # ruff: noqa: D102
# ruff: noqa: D101 # ruff: noqa: D101
import contextlib from typing import List, Union
import datetime
import inspect
import sys
from abc import abstractmethod
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Tuple, Type, Union
import h5py import h5py
from numpydantic.interface.hdf5 import H5ArrayPath
from pydantic import BaseModel, ConfigDict, Field
from nwb_linkml.annotations import unwrap_optional
from nwb_linkml.maps import Map
from nwb_linkml.types.hdf5 import HDF5_Path
if sys.version_info.minor >= 11:
from enum import StrEnum
else:
from enum import Enum
class StrEnum(str, Enum):
"""StrEnum-ish class for python 3.10"""
-if TYPE_CHECKING:
-    from nwb_linkml.providers.schema import SchemaProvider
+def get_attr_references(obj: h5py.Dataset | h5py.Group) -> dict[str, str]:
+    """
+    Get any references in object attributes
+    """
+    refs = {
+        k: obj.file.get(ref).name
+        for k, ref in obj.attrs.items()
+        if isinstance(ref, h5py.h5r.Reference)
+    }
+    return refs
class ReadPhases(StrEnum): def get_dataset_references(obj: h5py.Dataset | h5py.Group) -> list[str] | dict[str, str]:
plan = "plan"
"""Before reading starts, building an index of objects to read"""
read = "read"
"""Main reading operation"""
construct = "construct"
"""After reading, casting the results of the read into their models"""
class H5SourceItem(BaseModel):
""" """
Descriptor of items for each element when :func:`.flatten_hdf` flattens an hdf5 file. Get references in datasets
Consumed by :class:`.HDF5Map` classes, orchestrated by :class:`.ReadQueue`
"""
path: str
"""Absolute hdf5 path of element"""
h5f_path: str
"""Path to the source hdf5 file"""
leaf: bool
"""
If ``True``, this item has no children
(and thus we should start instantiating it before ascending to parent classes)
"""
h5_type: Literal["group", "dataset"]
"""What kind of hdf5 element this is"""
depends: List[str] = Field(default_factory=list)
"""
Paths of other source items that this item depends on before it can be instantiated.
eg. from softlinks
"""
attrs: dict = Field(default_factory=dict)
"""Any static attrs that can be had from the element"""
namespace: Optional[str] = None
"""Optional: The namespace that the neurodata type belongs to"""
neurodata_type: Optional[str] = None
"""Optional: the neurodata type for this dataset or group"""
model_config = ConfigDict(arbitrary_types_allowed=True)
@property
def parts(self) -> List[str]:
"""path split by /"""
return self.path.split("/")
class H5ReadResult(BaseModel):
"""
Result returned by each of our mapping operations.
Also used as the source for operations in the ``construct`` :class:`.ReadPhases`
"""
path: str
"""absolute hdf5 path of element"""
source: Union[H5SourceItem, "H5ReadResult"]
"""
Source that this result is based on.
The map can modify this item, so the container should update the source
queue on each pass
"""
completed: bool = False
"""
Was this item completed by this map step? False for cases where eg.
we still have dependencies that need to be completed before this one
"""
result: Optional[dict | str | int | float | BaseModel] = None
"""
If completed, built result. A dict that can be instantiated into the model.
If completed is True and result is None, then remove this object
"""
model: Optional[Type[BaseModel]] = None
"""
The model that this item should be cast into
"""
completes: List[HDF5_Path] = Field(default_factory=list)
"""
If this result completes any other fields, we remove them from the build queue.
"""
namespace: Optional[str] = None
"""
Optional: the namespace of the neurodata type for this object
"""
neurodata_type: Optional[str] = None
"""
Optional: The neurodata type to use for this object
"""
applied: List[str] = Field(default_factory=list)
"""
Which map operations were applied to this item
"""
errors: List[str] = Field(default_factory=list)
"""
Problems that occurred during resolution
"""
depends: List[HDF5_Path] = Field(default_factory=list)
"""
Other items that the final resolution of this item depends on
"""
FlatH5 = Dict[str, H5SourceItem]
class HDF5Map(Map):
phase: ReadPhases
priority: int = 0
"""
Within a phase, sort mapping operations from low to high priority
(maybe this should be renamed because highest priority last doesn't make a lot of sense)
"""
@classmethod
@abstractmethod
def check(
cls,
src: H5SourceItem | H5ReadResult,
provider: "SchemaProvider",
completed: Dict[str, H5ReadResult],
) -> bool:
"""Check if this map applies to the given item to read"""
@classmethod
@abstractmethod
def apply(
cls,
src: H5SourceItem | H5ReadResult,
provider: "SchemaProvider",
completed: Dict[str, H5ReadResult],
) -> H5ReadResult:
"""Actually apply the map!"""
# --------------------------------------------------
# Planning maps
# --------------------------------------------------
def check_empty(obj: h5py.Group) -> bool:
"""
Check if a group has no attrs or children OR has no attrs and all its children
also have no attrs and no children
Returns:
bool
""" """
refs = []
# For datasets, apply checks depending on shape of data.
if isinstance(obj, h5py.Dataset): if isinstance(obj, h5py.Dataset):
return False if obj.shape == ():
# scalar
# check if we are empty if isinstance(obj[()], h5py.h5r.Reference):
no_attrs = False refs = [obj.file.get(obj[()]).name]
if len(obj.attrs) == 0: elif len(obj) > 0 and isinstance(obj[0], h5py.h5r.Reference):
no_attrs = True # single-column
refs = [obj.file.get(ref).name for ref in obj[:]]
no_children = False elif len(obj.dtype) > 1:
if len(obj.keys()) == 0: # "compound" datasets
no_children = True refs = {}
for name in obj.dtype.names:
# check if immediate children are empty if isinstance(obj[name][0], h5py.h5r.Reference):
# handles empty groups of empty groups refs[name] = [obj.file.get(ref).name for ref in obj[name]]
children_empty = False return refs
if all(
[
isinstance(item, h5py.Group) and len(item.keys()) == 0 and len(item.attrs) == 0
for item in obj.values()
]
):
children_empty = True
# if we have no attrs and we are a leaf OR our children are empty, remove us
return bool(no_attrs and (no_children or children_empty))
class PruneEmpty(HDF5Map):
"""Remove groups with no attrs"""
phase = ReadPhases.plan
@classmethod
def check(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
if src.h5_type == "group":
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
return check_empty(obj)
@classmethod
def apply(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
return H5ReadResult.model_construct(path=src.path, source=src, completed=True)
#
# class ResolveDynamicTable(HDF5Map):
# """
# Handle loading a dynamic table!
#
# Dynamic tables are sort of odd in that their models don't include their fields
# (except as a list of strings in ``colnames`` ),
# so we need to create a new model that includes fields for each column,
# and then we include the datasets as :class:`~numpydantic.interface.hdf5.H5ArrayPath`
# objects which lazy load the arrays in a thread/process safe way.
#
# This map also resolves the child elements,
# indicating so by the ``completes`` field in the :class:`.ReadResult`
# """
#
# phase = ReadPhases.read
# priority = 1
#
# @classmethod
# def check(
# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
# ) -> bool:
# if src.h5_type == "dataset":
# return False
# if "neurodata_type" in src.attrs:
# if src.attrs["neurodata_type"] == "DynamicTable":
# return True
# # otherwise, see if it's a subclass
# model = provider.get_class(src.attrs["namespace"], src.attrs["neurodata_type"])
# # just inspect the MRO as strings rather than trying to check subclasses because
# # we might replace DynamicTable in the future, and there isn't a stable DynamicTable
# # class to inherit from anyway because of the whole multiple versions thing
# parents = [parent.__name__ for parent in model.__mro__]
# return "DynamicTable" in parents
# else:
# return False
#
# @classmethod
# def apply(
# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
# ) -> H5ReadResult:
# with h5py.File(src.h5f_path, "r") as h5f:
# obj = h5f.get(src.path)
#
# # make a populated model :)
# base_model = provider.get_class(src.namespace, src.neurodata_type)
# model = dynamictable_to_model(obj, base=base_model)
#
# completes = [HDF5_Path(child.name) for child in obj.values()]
#
# return H5ReadResult(
# path=src.path,
# source=src,
# result=model,
# completes=completes,
# completed=True,
# applied=["ResolveDynamicTable"],
# )
class ResolveModelGroup(HDF5Map):
"""
HDF5 Groups that have a model, as indicated by ``neurodata_type`` in their attrs.
We use the model to determine what fields we should get, and then stash references
to the children to process later as :class:`.HDF5_Path`
**Special Case:** Some groups like ``ProcessingGroup`` and others that have an arbitrary
number of named children have a special ``children`` field that is a dictionary mapping
names to the objects themselves.
So for example, this:
/processing/
eye_tracking/
cr_ellipse_fits/
center_x
center_y
...
eye_ellipse_fits/
...
pupil_ellipse_fits/
...
eye_tracking_rig_metadata/
...
would pack the ``eye_tracking`` group (a ``ProcessingModule`` ) as:
{
"name": "eye_tracking",
"children": {
"cr_ellipse_fits": HDF5_Path('/processing/eye_tracking/cr_ellipse_fits'),
"eye_ellipse_fits" : HDF5_Path('/processing/eye_tracking/eye_ellipse_fits'),
...
}
}
We will do some nice things in the model metaclass to make it possible to access the children
like ``nwbfile.processing.cr_ellipse_fits.center_x``
rather than having to switch between indexing and attribute access :)
"""
phase = ReadPhases.read
priority = 10 # do this generally last
@classmethod
def check(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
return bool("neurodata_type" in src.attrs and src.h5_type == "group")
@classmethod
def apply(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
model = provider.get_class(src.namespace, src.neurodata_type)
res = {}
depends = []
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
for key in model.model_fields:
if key == "children":
res[key] = {name: resolve_hardlink(child) for name, child in obj.items()}
depends.extend([resolve_hardlink(child) for child in obj.values()])
elif key in obj.attrs:
res[key] = obj.attrs[key]
continue
elif key in obj:
# make sure it's not empty
if check_empty(obj[key]):
continue
# stash a reference to this, we'll compile it at the end
depends.append(resolve_hardlink(obj[key]))
res[key] = resolve_hardlink(obj[key])
res["hdf5_path"] = src.path
res["name"] = src.parts[-1]
return H5ReadResult(
path=src.path,
source=src,
completed=True,
result=res,
model=model,
namespace=src.namespace,
neurodata_type=src.neurodata_type,
applied=["ResolveModelGroup"],
depends=depends,
)
class ResolveDatasetAsDict(HDF5Map):
"""
Resolve datasets that do not have a ``neurodata_type`` of their own as a dictionary
that will be packaged into a model in the next step. Grabs the array in an
:class:`~numpydantic.interface.hdf5.H5ArrayPath`
under an ``array`` key, and then grabs any additional ``attrs`` as well.
Mutually exclusive with :class:`.ResolveScalars` - this only applies to datasets that are larger
than a single entry.
"""
phase = ReadPhases.read
priority = 11
@classmethod
def check(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
if src.h5_type == "dataset" and "neurodata_type" not in src.attrs:
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
return obj.shape != ()
else:
return False
@classmethod
def apply(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
res = {
"array": H5ArrayPath(file=src.h5f_path, path=src.path),
"hdf5_path": src.path,
"name": src.parts[-1],
**src.attrs,
}
return H5ReadResult(
path=src.path, source=src, completed=True, result=res, applied=["ResolveDatasetAsDict"]
)
class ResolveScalars(HDF5Map):
phase = ReadPhases.read
priority = 11 # catchall
@classmethod
def check(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
if src.h5_type == "dataset" and "neurodata_type" not in src.attrs:
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
return obj.shape == ()
else:
return False
@classmethod
def apply(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
res = obj[()]
return H5ReadResult(
path=src.path, source=src, completed=True, result=res, applied=["ResolveScalars"]
)
class ResolveContainerGroups(HDF5Map):
"""
Groups like ``/acquisition``` and others that have no ``neurodata_type``
(and thus no model) are returned as a dictionary with :class:`.HDF5_Path` references to
the children they contain
"""
phase = ReadPhases.read
priority = 9
@classmethod
def check(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
if src.h5_type == "group" and "neurodata_type" not in src.attrs and len(src.attrs) == 0:
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
return len(obj.keys()) > 0
else:
return False
@classmethod
def apply(
cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
"""Simple, just return a dict with references to its children"""
depends = []
with h5py.File(src.h5f_path, "r") as h5f:
obj = h5f.get(src.path)
children = {}
for k, v in obj.items():
children[k] = HDF5_Path(v.name)
depends.append(HDF5_Path(v.name))
# res = {
# 'name': src.parts[-1],
# 'hdf5_path': src.path,
# **children
# }
return H5ReadResult(
path=src.path,
source=src,
completed=True,
result=children,
depends=depends,
applied=["ResolveContainerGroups"],
)
# --------------------------------------------------
# Completion Steps
# --------------------------------------------------
class CompletePassThrough(HDF5Map):
"""
Passthrough map for the construction phase for models that don't need any more work done
- :class:`.ResolveDynamicTable`
- :class:`.ResolveDatasetAsDict`
- :class:`.ResolveScalars`
"""
phase = ReadPhases.construct
priority = 1
@classmethod
def check(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
passthrough_ops = ("ResolveDynamicTable", "ResolveDatasetAsDict", "ResolveScalars")
return any(hasattr(src, "applied") and op in src.applied for op in passthrough_ops)
@classmethod
def apply(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
return src
class CompleteContainerGroups(HDF5Map):
"""
Complete container groups (usually top-level groups like /acquisition)
that do not have a neurodata type of their own by resolving them as dictionaries
of values (that will then be given to their parent model)
"""
phase = ReadPhases.construct
priority = 3
@classmethod
def check(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
return (
src.model is None
and src.neurodata_type is None
and src.source.h5_type == "group"
and all([depend in completed for depend in src.depends])
)
@classmethod
def apply(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
res, errors, completes = resolve_references(src.result, completed)
return H5ReadResult(
result=res,
errors=errors,
completes=completes,
**src.model_dump(exclude={"result", "errors", "completes"}),
)
class CompleteModelGroups(HDF5Map):
phase = ReadPhases.construct
priority = 4
@classmethod
def check(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
return (
src.model is not None
and src.source.h5_type == "group"
and src.neurodata_type != "NWBFile"
and all([depend in completed for depend in src.depends])
)
@classmethod
def apply(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
# gather any results that were left for completion elsewhere
# first get all already-completed items
res = {k: v for k, v in src.result.items() if not isinstance(v, HDF5_Path)}
unpacked_results, errors, completes = resolve_references(src.result, completed)
res.update(unpacked_results)
# now that we have the model in hand, we can solve any datasets that had an array
# but whose attributes are fixed (and thus should just be an array, rather than a subclass)
for k, v in src.model.model_fields.items():
annotation = unwrap_optional(v.annotation)
if (
inspect.isclass(annotation)
and not issubclass(annotation, BaseModel)
and isinstance(res, dict)
and k in res
and isinstance(res[k], dict)
and "array" in res[k]
):
res[k] = res[k]["array"]
instance = src.model(**res)
return H5ReadResult(
path=src.path,
source=src,
result=instance,
model=src.model,
completed=True,
completes=completes,
neurodata_type=src.neurodata_type,
namespace=src.namespace,
applied=src.applied + ["CompleteModelGroups"],
errors=errors,
)
class CompleteNWBFile(HDF5Map):
"""
The Top-Level NWBFile class is so special cased we just make its own completion special case!
.. todo::
This is truly hideous, just meant as a way to get to the finish line on a late night,
will be cleaned up later
"""
phase = ReadPhases.construct
priority = 11
@classmethod
def check(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> bool:
return src.neurodata_type == "NWBFile" and all(
[depend in completed for depend in src.depends]
)
@classmethod
def apply(
cls, src: H5ReadResult, provider: "SchemaProvider", completed: Dict[str, H5ReadResult]
) -> H5ReadResult:
res = {k: v for k, v in src.result.items() if not isinstance(v, HDF5_Path)}
unpacked_results, errors, completes = resolve_references(src.result, completed)
res.update(unpacked_results)
res["name"] = "root"
res["file_create_date"] = [
datetime.datetime.fromisoformat(ts.decode("utf-8"))
for ts in res["file_create_date"]["array"][:]
]
if "stimulus" not in res:
res["stimulus"] = provider.get_class("core", "NWBFileStimulus")()
electrode_groups = []
egroup_keys = list(res["general"].get("extracellular_ephys", {}).keys())
egroup_dict = {}
for k in egroup_keys:
if k != "electrodes":
egroup = res["general"]["extracellular_ephys"][k]
electrode_groups.append(egroup)
egroup_dict[egroup.hdf5_path] = egroup
del res["general"]["extracellular_ephys"][k]
if len(electrode_groups) > 0:
res["general"]["extracellular_ephys"]["electrode_group"] = electrode_groups
trode_type = provider.get_class("core", "NWBFileGeneralExtracellularEphysElectrodes")
# anmro = list(type(res['general']['extracellular_ephys']['electrodes']).__mro__)
# anmro.insert(1, trode_type)
trodes_original = res["general"]["extracellular_ephys"]["electrodes"]
trodes = trode_type.model_construct(trodes_original.model_dump())
res["general"]["extracellular_ephys"]["electrodes"] = trodes
instance = src.model(**res)
return H5ReadResult(
path=src.path,
source=src,
result=instance,
model=src.model,
completed=True,
completes=completes,
neurodata_type=src.neurodata_type,
namespace=src.namespace,
applied=src.applied + ["CompleteModelGroups"],
errors=errors,
)
class ReadQueue(BaseModel):
"""Container model to store items as they are built"""
h5f: Path = Field(
description=(
"Path to the source hdf5 file used when resolving the queue! "
"Each translation step should handle opening and closing the file, "
"rather than passing a handle around"
)
)
provider: "SchemaProvider" = Field(
description="SchemaProvider used by each of the items in the read queue"
)
queue: Dict[str, H5SourceItem | H5ReadResult] = Field(
default_factory=dict,
description="Items left to be instantiated, keyed by hdf5 path",
)
completed: Dict[str, H5ReadResult] = Field(
default_factory=dict,
description="Items that have already been instantiated, keyed by hdf5 path",
)
model_config = ConfigDict(arbitrary_types_allowed=True)
phases_completed: List[ReadPhases] = Field(
default_factory=list, description="Phases that have already been completed"
)
def apply_phase(self, phase: ReadPhases, max_passes: int = 5) -> None:
phase_maps = [m for m in HDF5Map.__subclasses__() if m.phase == phase]
phase_maps = sorted(phase_maps, key=lambda x: x.priority)
results = []
# TODO: Thread/multiprocess this
for item in self.queue.values():
for op in phase_maps:
if op.check(item, self.provider, self.completed):
# Formerly there was an "exclusive" property in the maps which let
# potentially multiple operations be applied per stage,
# except if an operation was `exclusive` which would break
# iteration over the operations.
# This was removed because it was badly implemented,
# but if there is ever a need to do that,
# then we would need to decide what to do with the multiple results.
results.append(op.apply(item, self.provider, self.completed))
break # out of inner iteration
# remake the source queue and save results
completes = []
for res in results:
# remove the original item
del self.queue[res.path]
if res.completed:
# if the item has been finished and there is some result, add it to the results
if res.result is not None:
self.completed[res.path] = res
# otherwise if the item has been completed and there was no result,
# just drop it.
# if we have completed other things, delete them from the queue
completes.extend(res.completes)
else:
# if we didn't complete the item (eg. we found we needed more dependencies),
# add the updated source to the queue again
if phase != ReadPhases.construct:
self.queue[res.path] = res.source
else:
self.queue[res.path] = res
# delete the ones that were already completed but might have been
# incorrectly added back in the pile
for c in completes:
with contextlib.suppress(KeyError):
del self.queue[c]
# if we have nothing left in our queue, we have completed this phase
# and prepare only ever has one pass
if phase == ReadPhases.plan:
self.phases_completed.append(phase)
return
if len(self.queue) == 0:
self.phases_completed.append(phase)
if phase != ReadPhases.construct:
# if we're not in the last phase, move our completed to our queue
self.queue = self.completed
self.completed = {}
elif max_passes > 0:
self.apply_phase(phase, max_passes=max_passes - 1)
def flatten_hdf(
h5f: h5py.File | h5py.Group, skip: str = "specifications"
) -> Dict[str, H5SourceItem]:
"""
Flatten all child elements of hdf element into a dict of :class:`.H5SourceItem` s
keyed by their path
Args:
h5f (:class:`h5py.File` | :class:`h5py.Group`): HDF file or group to flatten!
"""
items = {}
def _itemize(name: str, obj: h5py.Dataset | h5py.Group) -> None:
if skip in name:
return
leaf = isinstance(obj, h5py.Dataset) or len(obj.keys()) == 0
if isinstance(obj, h5py.Dataset):
h5_type = "dataset"
elif isinstance(obj, h5py.Group):
h5_type = "group"
else:
raise ValueError(f"Object must be a dataset or group! {obj}")
# get references in attrs and datasets to populate dependencies
# depends = get_references(obj)
if not name.startswith("/"):
name = "/" + name
attrs = dict(obj.attrs.items())
items[name] = H5SourceItem.model_construct(
path=name,
h5f_path=h5f.file.filename,
leaf=leaf,
# depends = depends,
h5_type=h5_type,
attrs=attrs,
namespace=attrs.get("namespace"),
neurodata_type=attrs.get("neurodata_type"),
)
h5f.visititems(_itemize)
# then add the root item
_itemize(h5f.name, h5f)
return items
def get_references(obj: h5py.Dataset | h5py.Group) -> List[str]: def get_references(obj: h5py.Dataset | h5py.Group) -> List[str]:
@ -851,60 +66,21 @@ def get_references(obj: h5py.Dataset | h5py.Group) -> List[str]:
List[str]: List of paths that are referenced within this object List[str]: List of paths that are referenced within this object
""" """
# Find references in attrs # Find references in attrs
refs = [ref for ref in obj.attrs.values() if isinstance(ref, h5py.h5r.Reference)] attr_refs = get_attr_references(obj)
dataset_refs = get_dataset_references(obj)
# For datasets, apply checks depending on shape of data. # flatten to list
if isinstance(obj, h5py.Dataset): refs = [ref for ref in attr_refs.values()]
if obj.shape == (): if isinstance(dataset_refs, list):
# scalar refs.extend(dataset_refs)
if isinstance(obj[()], h5py.h5r.Reference):
refs.append(obj[()])
elif isinstance(obj[0], h5py.h5r.Reference):
# single-column
refs.extend(obj[:].tolist())
elif len(obj.dtype) > 1:
# "compound" datasets
for name in obj.dtype.names:
if isinstance(obj[name][0], h5py.h5r.Reference):
refs.extend(obj[name].tolist())
# dereference and get name of reference
if isinstance(obj, h5py.Dataset):
depends = list(set([obj.parent.get(i).name for i in refs]))
else: else:
depends = list(set([obj.get(i).name for i in refs])) for v in dataset_refs.values():
return depends refs.extend(v)
return refs
def resolve_references( def resolve_hardlink(obj: Union[h5py.Group, h5py.Dataset]) -> str:
src: dict, completed: Dict[str, H5ReadResult]
) -> Tuple[dict, List[str], List[HDF5_Path]]:
"""
Recursively replace references to other completed items with their results
"""
completes = []
errors = []
res = {}
for path, item in src.items():
if isinstance(item, HDF5_Path):
other_item = completed.get(item)
if other_item is None:
errors.append(f"Couldn't find: {item}")
res[path] = other_item.result
completes.append(item)
elif isinstance(item, dict):
inner_res, inner_error, inner_completes = resolve_references(item, completed)
res[path] = inner_res
errors.extend(inner_error)
completes.extend(inner_completes)
else:
res[path] = item
return res, errors, completes
def resolve_hardlink(obj: Union[h5py.Group, h5py.Dataset]) -> HDF5_Path:
""" """
Unhelpfully, hardlinks are pretty challenging to detect with h5py, so we have Unhelpfully, hardlinks are pretty challenging to detect with h5py, so we have
to do extra work to check if an item is "real" or a hardlink to another item. to do extra work to check if an item is "real" or a hardlink to another item.
@ -916,4 +92,4 @@ def resolve_hardlink(obj: Union[h5py.Group, h5py.Dataset]) -> HDF5_Path:
We basically dereference the object and return that path instead of the path We basically dereference the object and return that path instead of the path
given by the object's ``name`` given by the object's ``name``
""" """
return HDF5_Path(obj.file[obj.ref].name) return obj.file[obj.ref].name
@ -127,6 +127,7 @@ class LinkMLProvider(Provider):
for schema_needs in adapter.needed_imports.values(): for schema_needs in adapter.needed_imports.values():
for needed in schema_needs: for needed in schema_needs:
adapter.imported.append(ns_adapters[needed]) adapter.imported.append(ns_adapters[needed])
adapter.populate_imports()
# then do the build # then do the build
res = {} res = {}
@ -97,9 +97,9 @@ class Provider(ABC):
module_path = Path(importlib.util.find_spec("nwb_models").origin).parent module_path = Path(importlib.util.find_spec("nwb_models").origin).parent
if self.PROVIDES == "linkml": if self.PROVIDES == "linkml":
namespace_path = module_path / "schema" / "linkml" / namespace namespace_path = module_path / "schema" / "linkml" / namespace_module
elif self.PROVIDES == "pydantic": elif self.PROVIDES == "pydantic":
namespace_path = module_path / "models" / "pydantic" / namespace namespace_path = module_path / "models" / "pydantic" / namespace_module
if version is not None: if version is not None:
version_path = namespace_path / version_module_case(version) version_path = namespace_path / version_module_case(version)
@ -278,7 +278,7 @@ class PydanticProvider(Provider):
nwb_models.models.pydantic.{namespace}.{version} nwb_models.models.pydantic.{namespace}.{version}
""" """
name_pieces = [ name_pieces = [
"nwb_linkml", "nwb_models",
"models", "models",
"pydantic", "pydantic",
module_case(namespace), module_case(namespace),
@ -131,7 +131,7 @@ class SchemaProvider(Provider):
results = {} results = {}
for ns, ns_result in linkml_res.items(): for ns, ns_result in linkml_res.items():
results[ns] = pydantic_provider.build( results[ns] = pydantic_provider.build(
ns_result["namespace"], versions=self.versions, **pydantic_kwargs ns_result.namespace, versions=self.versions, **pydantic_kwargs
) )
return results return results
@ -1,20 +0,0 @@
"""
Types used with hdf5 io
"""
from typing import Any
from pydantic import GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema
class HDF5_Path(str):
"""
Trivial subclass of string to indicate that it is a reference to a location within an HDF5 file
"""
@classmethod
def __get_pydantic_core_schema__(
cls, source_type: Any, handler: GetCoreSchemaHandler
) -> CoreSchema:
return core_schema.no_info_after_validator_function(cls, handler(str))

View file

@ -9,10 +9,16 @@ from .fixtures import * # noqa: F403
def pytest_addoption(parser): def pytest_addoption(parser):
parser.addoption(
"--clean",
action="store_true",
default=False,
help="Don't reuse cached resources like cloned git repos or generated files",
)
parser.addoption( parser.addoption(
"--with-output", "--with-output",
action="store_true", action="store_true",
help="dump output in compliance test for richer debugging information", help="keep test outputs for richer debugging information",
) )
parser.addoption( parser.addoption(
"--without-cache", action="store_true", help="Don't use a sqlite cache for network requests" "--without-cache", action="store_true", help="Don't use a sqlite cache for network requests"

Binary file not shown.

View file

@ -0,0 +1,61 @@
# manually transcribed target version of nwb-linkml dataset
# matching the one created by fixtures.py:nwb_file
meta:
id: my_dataset
prefixes:
nwbfile:
- path: "test_nwb.nwb"
- hash: "blake2b:blahblahblahblah"
imports:
core:
as: nwb
version: "2.7.0"
from:
- pypi:
package: nwb-models
hdmf-common:
as: hdmf
version: "1.8.0"
from:
- pypi:
package: nwb-models
extracellular_ephys: &ecephys
electrodes:
group:
- @shank0
- @shank0
- @shank0
- @shank1
- # etc.
shank0:
device: @general.devices.array
shank1:
device: @general.devices.array
# etc.
data: !nwb.NWBFile
file_create_date: [ 2024-01-01 ]
identifier: "1111-1111-1111-1111"
session_description: All that you touch, you change.
session_start_time: 2024-01-01T01:01:01
general:
devices:
- Heka ITC-1600:
- Microscope:
description: My two-photon microscope
manufacturer: The best microscope manufacturer
- array:
description: old reliable
manufacturer: diy
extracellular_ephys: nwbfile:/general/extracellular_ephys
experiment_description: All that you change, changes you.
experimenter: [ "Lauren Oya Olamina" ]
institution: Earthseed Research Institute
keywords:
- behavior
- belief
related_publications: doi:10.1016/j.neuron.2016.12.011

View file

@ -0,0 +1,76 @@
# Sketch of a condensed expression syntax for creation with nwb-linkml
# just a sketch! keeping here for continued work but currently unused.
---
id: my_dataset
prefixes:
nwbfile:
- path: "test_nwb.nwb"
- hash: "blake2b:blahblahblahblah"
imports:
core:
as: nwb
version: "2.7.0"
from:
- pypi:
package: nwb-models
hdmf-common:
as: hdmf
version: "1.8.0"
from:
- pypi:
package: nwb-models
---
extracellular_ephys: &ecephys
electrodes:
group:
- @shank{{i}}
- @shank{{i}}
- @shank{{i}}
# could have expression here like { range(3) } => i
# - ... { range(3) } => i
# or blank ... implies use expression from outer scope
- ...
shank{{i}}:
device: @general.devices.array
...: { range(3) } => i
# expands to
extracellular_ephys:
electrodes:
group:
- @shank0
- @shank0
- @shank0
- @shank1
- # etc.
shank0:
device: @general.devices.array
shank1:
device: @general.devices.array
# etc.
data: !{{ nwb.NWBFile }} <== :nwbfile
file_create_date: [ 2024-01-01 ]
identifier: "1111-1111-1111-1111"
session_description: All that you touch, you change.
session_start_time: 2024-01-01T01:01:01
general:
devices:
- Heka ITC-1600:
- Microscope:
- array:
description: old reliable
manufacturer: diy
extracellular_ephys: *ecephys
experiment_description: All that you change, changes you.
experimenter: [ "Lauren Oya Olamina" ]
institution: Earthseed Research Institute
keywords:
- behavior
- belief
related_publications: doi:10.1016/j.neuron.2016.12.011

nwb_linkml/tests/fixtures/__init__.py (new file, 29 lines)
View file

@ -0,0 +1,29 @@
from .nwb import nwb_file, nwb_file_base
from .paths import data_dir, tmp_output_dir, tmp_output_dir_func, tmp_output_dir_mod
from .schema import (
NWBSchemaTest,
TestSchemas,
linkml_schema,
linkml_schema_bare,
nwb_core_fixture,
nwb_core_linkml,
nwb_core_module,
nwb_schema,
)
__all__ = [
"NWBSchemaTest",
"TestSchemas",
"data_dir",
"linkml_schema",
"linkml_schema_bare",
"nwb_core_fixture",
"nwb_core_linkml",
"nwb_core_module",
"nwb_file",
"nwb_file_base",
"nwb_schema",
"tmp_output_dir",
"tmp_output_dir_func",
"tmp_output_dir_mod",
]

nwb_linkml/tests/fixtures/nwb.py (new file, 477 lines)
View file

@ -0,0 +1,477 @@
from datetime import datetime
from itertools import product
from pathlib import Path
import numpy as np
import pytest
from hdmf.common import DynamicTable, VectorData
from pynwb import NWBHDF5IO, NWBFile, TimeSeries
from pynwb.base import TimeSeriesReference, TimeSeriesReferenceVectorData
from pynwb.behavior import Position, SpatialSeries
from pynwb.ecephys import LFP, ElectricalSeries
from pynwb.file import Subject
from pynwb.icephys import VoltageClampSeries, VoltageClampStimulusSeries
from pynwb.image import ImageSeries
from pynwb.ophys import (
CorrectedImageStack,
Fluorescence,
ImageSegmentation,
MotionCorrection,
OnePhotonSeries,
OpticalChannel,
RoiResponseSeries,
TwoPhotonSeries,
)
@pytest.fixture(scope="session")
def nwb_file_base() -> NWBFile:
nwbfile = NWBFile(
session_description="All that you touch, you change.", # required
identifier="1111-1111-1111-1111", # required
session_start_time=datetime(year=2024, month=1, day=1), # required
session_id="session_1234", # optional
experimenter=[
"Lauren Oya Olamina",
], # optional
institution="Earthseed Research Institute", # optional
experiment_description="All that you change, changes you.", # optional
keywords=["behavior", "belief"], # optional
related_publications="doi:10.1016/j.neuron.2016.12.011", # optional
)
subject = Subject(
subject_id="001",
age="P90D",
description="mouse 5",
species="Mus musculus",
sex="M",
)
nwbfile.subject = subject
return nwbfile
def _nwb_timeseries(nwbfile: NWBFile) -> NWBFile:
data = np.arange(100, 200, 10)
timestamps = np.arange(10.0)
time_series_with_timestamps = TimeSeries(
name="test_timeseries",
description="an example time series",
data=data,
unit="m",
timestamps=timestamps,
)
nwbfile.add_acquisition(time_series_with_timestamps)
return nwbfile
def _nwb_position(nwbfile: NWBFile) -> NWBFile:
position_data = np.array([np.linspace(0, 10, 50), np.linspace(0, 8, 50)]).T
position_timestamps = np.linspace(0, 50).astype(float) / 200
spatial_series_obj = SpatialSeries(
name="SpatialSeries",
description="(x,y) position in open field",
data=position_data,
timestamps=position_timestamps,
reference_frame="(0,0) is bottom left corner",
)
# name is set to "Position" by default
position_obj = Position(spatial_series=spatial_series_obj)
behavior_module = nwbfile.create_processing_module(
name="behavior", description="processed behavioral data"
)
behavior_module.add(position_obj)
nwbfile.add_trial_column(
name="correct",
description="whether the trial was correct",
)
nwbfile.add_trial(start_time=1.0, stop_time=5.0, correct=True)
nwbfile.add_trial(start_time=6.0, stop_time=10.0, correct=False)
return nwbfile
def _nwb_ecephys(nwbfile: NWBFile) -> NWBFile:
"""
Extracellular Ephys
https://pynwb.readthedocs.io/en/latest/tutorials/domain/ecephys.html
"""
generator = np.random.default_rng()
device = nwbfile.create_device(name="array", description="old reliable", manufacturer="diy")
nwbfile.add_electrode_column(name="label", description="label of electrode")
nshanks = 4
nchannels_per_shank = 3
electrode_counter = 0
for ishank in range(nshanks):
# create an electrode group for this shank
electrode_group = nwbfile.create_electrode_group(
name=f"shank{ishank}",
description=f"electrode group for shank {ishank}",
device=device,
location="brain area",
)
# add electrodes to the electrode table
for ielec in range(nchannels_per_shank):
nwbfile.add_electrode(
group=electrode_group,
label=f"shank{ishank}elec{ielec}",
location="brain area",
)
electrode_counter += 1
all_table_region = nwbfile.create_electrode_table_region(
region=list(range(electrode_counter)), # reference row indices 0 to N-1
description="all electrodes",
)
raw_data = generator.standard_normal((50, 12))
raw_electrical_series = ElectricalSeries(
name="ElectricalSeries",
description="Raw acquisition traces",
data=raw_data,
electrodes=all_table_region,
starting_time=0.0,
# timestamp of the first sample in seconds relative to the session start time
rate=20000.0, # in Hz
)
nwbfile.add_acquisition(raw_electrical_series)
# --------------------------------------------------
# LFP
# --------------------------------------------------
generator = np.random.default_rng()
lfp_data = generator.standard_normal((50, 12))
lfp_electrical_series = ElectricalSeries(
name="ElectricalSeries",
description="LFP data",
data=lfp_data,
electrodes=all_table_region,
starting_time=0.0,
rate=200.0,
)
lfp = LFP(electrical_series=lfp_electrical_series)
ecephys_module = nwbfile.create_processing_module(
name="ecephys", description="processed extracellular electrophysiology data"
)
ecephys_module.add(lfp)
return nwbfile
def _nwb_units(nwbfile: NWBFile) -> NWBFile:
generator = np.random.default_rng()
# Spike Times
nwbfile.add_unit_column(name="quality", description="sorting quality")
firing_rate = 20
n_units = 10
res = 1000
duration = 20
for _ in range(n_units):
spike_times = np.where(generator.random(res * duration) < (firing_rate / res))[0] / res
nwbfile.add_unit(spike_times=spike_times, quality="good")
return nwbfile
def _nwb_icephys(nwbfile: NWBFile) -> NWBFile:
device = nwbfile.create_device(name="Heka ITC-1600")
electrode = nwbfile.create_icephys_electrode(
name="elec0", description="a mock intracellular electrode", device=device
)
stimulus = VoltageClampStimulusSeries(
name="ccss",
data=[1, 2, 3, 4, 5],
starting_time=123.6,
rate=10e3,
electrode=electrode,
gain=0.02,
sweep_number=np.uint64(15),
)
# Create an icephys response
response = VoltageClampSeries(
name="vcs",
data=[0.1, 0.2, 0.3, 0.4, 0.5],
conversion=1e-12,
resolution=np.nan,
starting_time=123.6,
rate=20e3,
electrode=electrode,
gain=0.02,
capacitance_slow=100e-12,
resistance_comp_correction=70.0,
sweep_number=np.uint64(15),
)
# we can also add stimulus template data as follows
rowindex = nwbfile.add_intracellular_recording(
electrode=electrode, stimulus=stimulus, response=response, id=10
)
rowindex2 = nwbfile.add_intracellular_recording(
electrode=electrode,
stimulus=stimulus,
stimulus_start_index=1,
stimulus_index_count=3,
response=response,
response_start_index=2,
response_index_count=3,
id=11,
)
rowindex3 = nwbfile.add_intracellular_recording(electrode=electrode, response=response, id=12)
nwbfile.intracellular_recordings.add_column(
name="recording_tag",
data=["A1", "A2", "A3"],
description="String with a recording tag",
)
location_column = VectorData(
name="location",
data=["Mordor", "Gondor", "Rohan"],
description="Recording location in Middle Earth",
)
lab_category = DynamicTable(
name="recording_lab_data",
description="category table for lab-specific recording metadata",
colnames=[
"location",
],
columns=[
location_column,
],
)
# Add the table as a new category to our intracellular_recordings
nwbfile.intracellular_recordings.add_category(category=lab_category)
nwbfile.intracellular_recordings.add_column(
name="voltage_threshold",
data=[0.1, 0.12, 0.13],
description="Just an example column on the electrodes category table",
category="electrodes",
)
stimulus_template = VoltageClampStimulusSeries(
name="ccst",
data=[0, 1, 2, 3, 4],
starting_time=0.0,
rate=10e3,
electrode=electrode,
gain=0.02,
)
nwbfile.add_stimulus_template(stimulus_template)
nwbfile.intracellular_recordings.add_column(
name="stimulus_template",
data=[
TimeSeriesReference(0, 5, stimulus_template),
# (start_index, index_count, stimulus_template)
TimeSeriesReference(1, 3, stimulus_template),
TimeSeriesReference.empty(stimulus_template),
],
# if there was no data for that recording, use empty reference
description=(
"Column storing the reference to the stimulus template for the recording (rows)."
),
category="stimuli",
col_cls=TimeSeriesReferenceVectorData,
)
icephys_simultaneous_recordings = nwbfile.get_icephys_simultaneous_recordings()
icephys_simultaneous_recordings.add_column(
name="simultaneous_recording_tag",
description="A custom tag for simultaneous_recordings",
)
simultaneous_index = nwbfile.add_icephys_simultaneous_recording(
recordings=[rowindex, rowindex2, rowindex3],
id=12,
simultaneous_recording_tag="LabTag1",
)
repetition_index = nwbfile.add_icephys_repetition(
sequential_recordings=[simultaneous_index], id=17
)
nwbfile.add_icephys_experimental_condition(repetitions=[repetition_index], id=19)
nwbfile.icephys_experimental_conditions.add_column(
name="tag",
data=np.arange(1),
description="integer tag for a experimental condition",
)
return nwbfile
def _nwb_ca_imaging(nwbfile: NWBFile) -> NWBFile:
"""
Calcium Imaging
https://pynwb.readthedocs.io/en/latest/tutorials/domain/ophys.html
"""
generator = np.random.default_rng()
device = nwbfile.create_device(
name="Microscope",
description="My two-photon microscope",
manufacturer="The best microscope manufacturer",
)
optical_channel = OpticalChannel(
name="OpticalChannel",
description="an optical channel",
emission_lambda=500.0,
)
imaging_plane = nwbfile.create_imaging_plane(
name="ImagingPlane",
optical_channel=optical_channel,
imaging_rate=30.0,
description="a very interesting part of the brain",
device=device,
excitation_lambda=600.0,
indicator="GFP",
location="V1",
grid_spacing=[0.01, 0.01],
grid_spacing_unit="meters",
origin_coords=[1.0, 2.0, 3.0],
origin_coords_unit="meters",
)
one_p_series = OnePhotonSeries(
name="OnePhotonSeries",
description="Raw 1p data",
data=np.ones((1000, 100, 100)),
imaging_plane=imaging_plane,
rate=1.0,
unit="normalized amplitude",
)
nwbfile.add_acquisition(one_p_series)
two_p_series = TwoPhotonSeries(
name="TwoPhotonSeries",
description="Raw 2p data",
data=np.ones((1000, 100, 100)),
imaging_plane=imaging_plane,
rate=1.0,
unit="normalized amplitude",
)
nwbfile.add_acquisition(two_p_series)
corrected = ImageSeries(
name="corrected", # this must be named "corrected"
description="A motion corrected image stack",
data=np.ones((1000, 100, 100)),
unit="na",
format="raw",
starting_time=0.0,
rate=1.0,
)
xy_translation = TimeSeries(
name="xy_translation",
description="x,y translation in pixels",
data=np.ones((1000, 2)),
unit="pixels",
starting_time=0.0,
rate=1.0,
)
corrected_image_stack = CorrectedImageStack(
corrected=corrected,
original=one_p_series,
xy_translation=xy_translation,
)
motion_correction = MotionCorrection(corrected_image_stacks=[corrected_image_stack])
ophys_module = nwbfile.create_processing_module(
name="ophys", description="optical physiology processed data"
)
ophys_module.add(motion_correction)
img_seg = ImageSegmentation()
ps = img_seg.create_plane_segmentation(
name="PlaneSegmentation",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series, # optional
)
ophys_module.add(img_seg)
for _ in range(30):
image_mask = np.zeros((100, 100))
# randomly generate example image masks
x = generator.integers(0, 95)
y = generator.integers(0, 95)
image_mask[x : x + 5, y : y + 5] = 1
# add image mask to plane segmentation
ps.add_roi(image_mask=image_mask)
ps2 = img_seg.create_plane_segmentation(
name="PlaneSegmentation2",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series, # optional
)
for _ in range(30):
# randomly generate example starting points for region
x = generator.integers(0, 95)
y = generator.integers(0, 95)
# define an example 4 x 3 region of pixels of weight '1'
pixel_mask = [(ix, iy, 1) for ix in range(x, x + 4) for iy in range(y, y + 3)]
# add pixel mask to plane segmentation
ps2.add_roi(pixel_mask=pixel_mask)
ps3 = img_seg.create_plane_segmentation(
name="PlaneSegmentation3",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series, # optional
)
for _ in range(30):
# randomly generate example starting points for region
x = generator.integers(0, 95)
y = generator.integers(0, 95)
z = generator.integers(0, 15)
# define an example 4 x 3 x 2 voxel region of weight '0.5'
voxel_mask = []
for ix, iy, iz in product(range(x, x + 4), range(y, y + 3), range(z, z + 2)):
voxel_mask.append((ix, iy, iz, 0.5))
# add voxel mask to plane segmentation
ps3.add_roi(voxel_mask=voxel_mask)
rt_region = ps.create_roi_table_region(region=[0, 1], description="the first of two ROIs")
roi_resp_series = RoiResponseSeries(
name="RoiResponseSeries",
description="Fluorescence responses for two ROIs",
data=np.ones((50, 2)), # 50 samples, 2 ROIs
rois=rt_region,
unit="lumens",
rate=30.0,
)
fl = Fluorescence(roi_response_series=roi_resp_series)
ophys_module.add(fl)
return nwbfile
@pytest.fixture(scope="session")
def nwb_file(tmp_output_dir, nwb_file_base, request: pytest.FixtureRequest) -> Path:
"""
NWB File created with pynwb that uses all the weird language features
Borrowing code from pynwb docs in one humongous fixture function,
since there's not really a reason to split it up.
"""
nwb_path = tmp_output_dir / "test_nwb.nwb"
if nwb_path.exists() and not request.config.getoption("--clean"):
return nwb_path
nwbfile = nwb_file_base
nwbfile = _nwb_timeseries(nwbfile)
nwbfile = _nwb_position(nwbfile)
nwbfile = _nwb_ecephys(nwbfile)
nwbfile = _nwb_units(nwbfile)
nwbfile = _nwb_icephys(nwbfile)
with NWBHDF5IO(nwb_path, "w") as io:
io.write(nwbfile)
return nwb_path

nwb_linkml/tests/fixtures/paths.py (new file, 63 lines)
View file

@ -0,0 +1,63 @@
import shutil
from pathlib import Path
import pytest
@pytest.fixture(scope="session")
def tmp_output_dir(request: pytest.FixtureRequest) -> Path:
path = Path(__file__).parents[1].resolve() / "__tmp__"
if path.exists():
if request.config.getoption("--clean"):
shutil.rmtree(path)
else:
for subdir in path.iterdir():
if subdir.name == "git":
# don't wipe out git repos every time, they don't rly change
continue
elif (
subdir.is_file()
and subdir.parent != path
or subdir.is_file()
and subdir.suffix == ".nwb"
):
continue
elif subdir.is_file():
subdir.unlink(missing_ok=True)
else:
shutil.rmtree(str(subdir))
path.mkdir(exist_ok=True)
return path
@pytest.fixture(scope="function")
def tmp_output_dir_func(tmp_output_dir) -> Path:
"""
tmp output dir that gets cleared between every function
cleans at the start rather than at cleanup in case the output is to be inspected
"""
subpath = tmp_output_dir / "__tmpfunc__"
if subpath.exists():
shutil.rmtree(str(subpath))
subpath.mkdir()
return subpath
@pytest.fixture(scope="module")
def tmp_output_dir_mod(tmp_output_dir) -> Path:
"""
tmp output dir that gets cleared between every test module
cleans at the start rather than at cleanup in case the output is to be inspected
"""
subpath = tmp_output_dir / "__tmpmod__"
if subpath.exists():
shutil.rmtree(str(subpath))
subpath.mkdir()
return subpath
@pytest.fixture(scope="session")
def data_dir() -> Path:
path = Path(__file__).parents[1].resolve() / "data"
return path

View file

@ -1,4 +1,3 @@
import shutil
from dataclasses import dataclass, field from dataclasses import dataclass, field
from pathlib import Path from pathlib import Path
from types import ModuleType from types import ModuleType
@ -14,70 +13,12 @@ from linkml_runtime.linkml_model import (
TypeDefinition, TypeDefinition,
) )
from nwb_linkml.adapters.namespaces import NamespacesAdapter from nwb_linkml.adapters import NamespacesAdapter
from nwb_linkml.io import schema as io from nwb_linkml.io import schema as io
from nwb_linkml.providers import LinkMLProvider, PydanticProvider from nwb_linkml.providers import LinkMLProvider, PydanticProvider
from nwb_linkml.providers.linkml import LinkMLSchemaBuild from nwb_linkml.providers.linkml import LinkMLSchemaBuild
from nwb_schema_language import Attribute, Dataset, Group from nwb_schema_language import Attribute, Dataset, Group
__all__ = [
"NWBSchemaTest",
"TestSchemas",
"data_dir",
"linkml_schema",
"linkml_schema_bare",
"nwb_core_fixture",
"nwb_schema",
"tmp_output_dir",
"tmp_output_dir_func",
"tmp_output_dir_mod",
]
@pytest.fixture(scope="session")
def tmp_output_dir() -> Path:
path = Path(__file__).parent.resolve() / "__tmp__"
if path.exists():
for subdir in path.iterdir():
if subdir.name == "git":
# don't wipe out git repos every time, they don't rly change
continue
elif subdir.is_file() and subdir.parent != path:
continue
elif subdir.is_file():
subdir.unlink(missing_ok=True)
else:
shutil.rmtree(str(subdir))
path.mkdir(exist_ok=True)
return path
@pytest.fixture(scope="function")
def tmp_output_dir_func(tmp_output_dir) -> Path:
"""
tmp output dir that gets cleared between every function
cleans at the start rather than at cleanup in case the output is to be inspected
"""
subpath = tmp_output_dir / "__tmpfunc__"
if subpath.exists():
shutil.rmtree(str(subpath))
subpath.mkdir()
return subpath
@pytest.fixture(scope="module")
def tmp_output_dir_mod(tmp_output_dir) -> Path:
"""
tmp output dir that gets cleared between every function
cleans at the start rather than at cleanup in case the output is to be inspected
"""
subpath = tmp_output_dir / "__tmpmod__"
if subpath.exists():
shutil.rmtree(str(subpath))
subpath.mkdir()
return subpath
@pytest.fixture(scope="session", params=[{"core_version": "2.7.0", "hdmf_version": "1.8.0"}]) @pytest.fixture(scope="session", params=[{"core_version": "2.7.0", "hdmf_version": "1.8.0"}])
def nwb_core_fixture(request) -> NamespacesAdapter: def nwb_core_fixture(request) -> NamespacesAdapter:
@ -108,12 +49,6 @@ def nwb_core_module(nwb_core_linkml: LinkMLSchemaBuild, tmp_output_dir) -> Modul
return mod return mod
@pytest.fixture(scope="session")
def data_dir() -> Path:
path = Path(__file__).parent.resolve() / "data"
return path
@dataclass @dataclass
class TestSchemas: class TestSchemas:
__test__ = False __test__ = False

View file

@ -151,7 +151,7 @@ def test_name_slot():
assert slot.name == "name" assert slot.name == "name"
assert slot.required assert slot.required
assert slot.range == "string" assert slot.range == "string"
assert slot.identifier is None assert slot.identifier
assert slot.ifabsent is None assert slot.ifabsent is None
assert slot.equals_string is None assert slot.equals_string is None
@ -160,7 +160,7 @@ def test_name_slot():
assert slot.name == "name" assert slot.name == "name"
assert slot.required assert slot.required
assert slot.range == "string" assert slot.range == "string"
assert slot.identifier is None assert slot.identifier
assert slot.ifabsent == "string(FixedName)" assert slot.ifabsent == "string(FixedName)"
assert slot.equals_string == "FixedName" assert slot.equals_string == "FixedName"

View file

@ -5,6 +5,8 @@ Note that since this is largely a subclass, we don't test all of the functionali
because it's tested in the base linkml package. because it's tested in the base linkml package.
""" """
# ruff: noqa: F821 - until the tests here settle down
import re import re
import sys import sys
import typing import typing
@ -16,7 +18,7 @@ import pytest
from numpydantic.ndarray import NDArrayMeta from numpydantic.ndarray import NDArrayMeta
from pydantic import BaseModel from pydantic import BaseModel
from nwb_linkml.generators.pydantic import NWBPydanticGenerator, compile_python from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from ..fixtures import ( from ..fixtures import (
TestSchemas, TestSchemas,

View file

@ -284,14 +284,14 @@ def test_dynamictable_assert_equal_length():
"existing_col": np.arange(10), "existing_col": np.arange(10),
"new_col_1": hdmf.VectorData(value=np.arange(11)), "new_col_1": hdmf.VectorData(value=np.arange(11)),
} }
with pytest.raises(ValidationError, match="Columns are not of equal length"): with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols) _ = MyDT(**cols)
cols = { cols = {
"existing_col": np.arange(11), "existing_col": np.arange(11),
"new_col_1": hdmf.VectorData(value=np.arange(10)), "new_col_1": hdmf.VectorData(value=np.arange(10)),
} }
with pytest.raises(ValidationError, match="Columns are not of equal length"): with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols) _ = MyDT(**cols)
# wrong lengths are fine as long as the index is good # wrong lengths are fine as long as the index is good
@ -308,7 +308,7 @@ def test_dynamictable_assert_equal_length():
"new_col_1": hdmf.VectorData(value=np.arange(100)), "new_col_1": hdmf.VectorData(value=np.arange(100)),
"new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5), "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5),
} }
with pytest.raises(ValidationError, match="Columns are not of equal length"): with pytest.raises(ValidationError, match="columns are not of equal length"):
_ = MyDT(**cols) _ = MyDT(**cols)
@ -344,7 +344,7 @@ def test_vectordata_indexing():
""" """
n_rows = 50 n_rows = 50
value_array, index_array = _ragged_array(n_rows) value_array, index_array = _ragged_array(n_rows)
value_array = np.concat(value_array) value_array = np.concatenate(value_array)
data = hdmf.VectorData(value=value_array) data = hdmf.VectorData(value=value_array)
@ -551,13 +551,13 @@ def test_aligned_dynamictable_indexing(aligned_table):
row.columns row.columns
== pd.MultiIndex.from_tuples( == pd.MultiIndex.from_tuples(
[ [
("table1", "index"), ("table1", "id"),
("table1", "col1"), ("table1", "col1"),
("table1", "col2"), ("table1", "col2"),
("table2", "index"), ("table2", "id"),
("table2", "col3"), ("table2", "col3"),
("table2", "col4"), ("table2", "col4"),
("table3", "index"), ("table3", "id"),
("table3", "col5"), ("table3", "col5"),
("table3", "col6"), ("table3", "col6"),
] ]
@ -592,7 +592,7 @@ def test_mixed_aligned_dynamictable(aligned_table):
AlignedTable, cols = aligned_table AlignedTable, cols = aligned_table
value_array, index_array = _ragged_array(10) value_array, index_array = _ragged_array(10)
value_array = np.concat(value_array) value_array = np.concatenate(value_array)
data = hdmf.VectorData(value=value_array) data = hdmf.VectorData(value=value_array)
index = hdmf.VectorIndex(value=index_array) index = hdmf.VectorIndex(value=index_array)
@ -754,11 +754,11 @@ def test_aligned_dynamictable_ictable(intracellular_recordings_table):
rows.columns rows.columns
== pd.MultiIndex.from_tuples( == pd.MultiIndex.from_tuples(
[ [
("electrodes", "index"), ("electrodes", "id"),
("electrodes", "electrode"), ("electrodes", "electrode"),
("stimuli", "index"), ("stimuli", "id"),
("stimuli", "stimulus"), ("stimuli", "stimulus"),
("responses", "index"), ("responses", "id"),
("responses", "response"), ("responses", "response"),
] ]
) )

View file

@ -1,10 +1,10 @@
import pdb
import h5py import h5py
import networkx as nx
import numpy as np import numpy as np
import pytest import pytest
from nwb_linkml.io.hdf5 import HDF5IO, truncate_file from nwb_linkml.io.hdf5 import HDF5IO, filter_dependency_graph, hdf_dependency_graph, truncate_file
from nwb_linkml.maps.hdf5 import resolve_hardlink
@pytest.mark.skip() @pytest.mark.skip()
@ -13,7 +13,7 @@ def test_hdf_read(data_dir, dset):
NWBFILE = data_dir / dset NWBFILE = data_dir / dset
io = HDF5IO(path=NWBFILE) io = HDF5IO(path=NWBFILE)
# the test for now is just whether we can read it lol # the test for now is just whether we can read it lol
model = io.read() _ = io.read()
def test_truncate_file(tmp_output_dir): def test_truncate_file(tmp_output_dir):
@ -86,15 +86,60 @@ def test_truncate_file(tmp_output_dir):
assert target_h5f["data"]["dataset_contig"].attrs["anattr"] == 1 assert target_h5f["data"]["dataset_contig"].attrs["anattr"] == 1
@pytest.mark.skip() def test_dependencies_hardlink(nwb_file):
def test_flatten_hdf(): """
from nwb_linkml.maps.hdf5 import flatten_hdf Test that hardlinks are resolved (eg. from /processing/ecephys/LFP/ElectricalSeries/electrodes
to /acquisition/ElectricalSeries/electrodes)
Args:
nwb_file:
path = "/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb" Returns:
import h5py
h5f = h5py.File(path) """
flat = flatten_hdf(h5f) parent = "/processing/ecephys/LFP/ElectricalSeries"
assert not any(["specifications" in v.path for v in flat.values()]) source = "/processing/ecephys/LFP/ElectricalSeries/electrodes"
pdb.set_trace() target = "/acquisition/ElectricalSeries/electrodes"
raise NotImplementedError("Just a stub for local testing for now, finish me!")
# assert that the hardlink exists in the test file
with h5py.File(str(nwb_file), "r") as h5f:
node = h5f.get(source)
linked_node = resolve_hardlink(node)
assert linked_node == target
graph = hdf_dependency_graph(nwb_file)
# the parent should link to the target as a child
assert (parent, target) in graph.edges([parent])
assert graph.edges[parent, target]["label"] == "child"
@pytest.mark.dev
def test_dependency_graph_images(nwb_file, tmp_output_dir):
"""
Generate images of the dependency graph
"""
graph = hdf_dependency_graph(nwb_file)
A_unfiltered = nx.nx_agraph.to_agraph(graph)
A_unfiltered.draw(tmp_output_dir / "test_nwb_unfiltered.png", prog="dot")
graph = filter_dependency_graph(graph)
A_filtered = nx.nx_agraph.to_agraph(graph)
A_filtered.draw(tmp_output_dir / "test_nwb_filtered.png", prog="dot")
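# A toy sketch (plain networkx, no NWB file needed) of the graph shape these tests assert
# on: nodes are HDF5 paths, and parent -> child containment edges carry a "label" attribute.
import networkx as nx

graph = nx.DiGraph()
parent = "/processing/ecephys/LFP/ElectricalSeries"
target = "/acquisition/ElectricalSeries/electrodes"
graph.add_edge(parent, target, label="child")

assert (parent, target) in graph.edges([parent])
assert graph.edges[parent, target]["label"] == "child"
# drawing an image additionally needs the optional graphviz/pygraphviz stack:
# nx.nx_agraph.to_agraph(graph).draw("graph.png", prog="dot")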
@pytest.mark.parametrize(
"dset",
[
{"name": "aibs.nwb", "source": "sub-738651046_ses-760693773.nwb"},
{
"name": "aibs_ecephys.nwb",
"source": "sub-738651046_ses-760693773_probe-769322820_ecephys.nwb",
},
],
)
@pytest.mark.dev
def test_make_truncated_datasets(tmp_output_dir, data_dir, dset):
input_file = tmp_output_dir / dset["source"]
output_file = data_dir / dset["name"]
if not input_file.exists():
return
truncate_file(input_file, output_file, 10)

View file

@ -0,0 +1,110 @@
"""
Placeholder test module to test reading from pynwb-generated NWB file
"""
from datetime import datetime
import numpy as np
import pandas as pd
import pytest
from numpydantic.interface.hdf5 import H5Proxy
from pydantic import BaseModel
from pynwb import NWBHDF5IO
from pynwb import NWBFile as PyNWBFile
from nwb_linkml.io.hdf5 import HDF5IO
from nwb_models.models import NWBFile
def test_read_from_nwbfile(nwb_file):
"""
Read data from a pynwb HDF5 NWB file
Placeholder that just ensures that reads work and all pydantic models validate;
testing of correctness of the read will happen elsewhere.
"""
res = HDF5IO(nwb_file).read()
@pytest.fixture(scope="module")
def read_nwbfile(nwb_file) -> NWBFile:
res = HDF5IO(nwb_file).read()
return res
@pytest.fixture(scope="module")
def read_pynwb(nwb_file) -> PyNWBFile:
nwbf = NWBHDF5IO(nwb_file, "r")
res = nwbf.read()
yield res
nwbf.close()
def _compare_attrs(model: BaseModel, pymodel: object):
for field, value in model.model_dump().items():
if isinstance(value, (dict, H5Proxy)):
continue
if hasattr(pymodel, field):
pynwb_val = getattr(pymodel, field)
if isinstance(pynwb_val, list):
if isinstance(pynwb_val[0], datetime):
# need to normalize UTC numpy.datetime64 against timezone-aware datetime
continue
assert all([val == pval for val, pval in zip(value, pynwb_val)])
else:
if not pynwb_val:
# pynwb instantiates some stuff as empty dicts where we use ``None``
assert bool(pynwb_val) == bool(value)
else:
assert value == pynwb_val
def test_nwbfile_base(read_nwbfile, read_pynwb):
"""
Base attributes on top-level nwbfile are correct
"""
_compare_attrs(read_nwbfile, read_pynwb)
def test_timeseries(read_nwbfile, read_pynwb):
py_acq = read_pynwb.get_acquisition("test_timeseries")
acq = read_nwbfile.acquisition["test_timeseries"]
_compare_attrs(acq, py_acq)
# data and timeseries should be equal
assert np.array_equal(acq.data[:], py_acq.data[:])
assert np.array_equal(acq.timestamps[:], py_acq.timestamps[:])
def test_position(read_nwbfile, read_pynwb):
trials = read_nwbfile.intervals.trials[:]
py_trials = read_pynwb.trials.to_dataframe()
pd.testing.assert_frame_equal(py_trials, trials)
spatial = read_nwbfile.processing["behavior"].Position.SpatialSeries
py_spatial = read_pynwb.processing["behavior"]["Position"]["SpatialSeries"]
_compare_attrs(spatial, py_spatial)
assert np.array_equal(spatial[:], py_spatial.data[:])
assert np.array_equal(spatial.timestamps[:], py_spatial.timestamps[:])
def test_ecephys(read_nwbfile, read_pynwb):
pass
def test_units(read_nwbfile, read_pynwb):
pass
def test_icephys(read_nwbfile, read_pynwb):
pass
def test_ca_imaging(read_nwbfile, read_pynwb):
pass
def test_read_from_yaml(nwb_file):
"""
Read data from a yaml-fied NWB file
"""
pass

View file

@ -1 +1,3 @@
# nwb-models # nwb-models
(README forthcoming, for now see [`nwb-linkml`](https://pypi.org/project/nwb-linkml))

View file

@ -1,6 +1,6 @@
[project] [project]
name = "nwb-models" name = "nwb-models"
version = "0.1.0" version = "0.2.0"
description = "Pydantic/LinkML models for Neurodata Without Borders" description = "Pydantic/LinkML models for Neurodata Without Borders"
authors = [ authors = [
{name = "sneakers-the-rat", email = "sneakers-the-rat@protonmail.com"}, {name = "sneakers-the-rat", email = "sneakers-the-rat@protonmail.com"},

View file

@ -1 +0,0 @@
from .pydantic.core.v2_7_0.namespace import *

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
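# A standalone sketch (hypothetical Wrapped/Outer models) of what the "coerce_value"
# wrap-validator added above buys: when a value fails validation, pydantic retries with
# the object's ``value`` attribute (or ``["value"]`` key) before giving up. The companion
# "coerce_subclass" validator similarly re-instantiates a parent-class value as the
# annotated child class from its __dict__ and __pydantic_extra__.
from typing import Any

from pydantic import BaseModel, field_validator

class Wrapped(BaseModel):
    value: int

class Outer(BaseModel):
    count: int

    @field_validator("*", mode="wrap")
    @classmethod
    def coerce_value(cls, v: Any, handler) -> Any:
        """Try to rescue instantiation by using the value field"""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler(v.value)
            except AttributeError:
                try:
                    return handler(v["value"])
                except (IndexError, KeyError, TypeError):
                    raise e1

# Wrapped itself is not an int, but its .value is, so validation succeeds on the retry
assert Outer(count=Wrapped(value=5)).count == 5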

View file

@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -40,6 +40,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -57,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -20,7 +20,12 @@ from pydantic import (
) )
from ...core.v2_2_0.core_nwb_base import TimeSeries from ...core.v2_2_0.core_nwb_base import TimeSeries
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_0.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -31,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +55,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -127,7 +163,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags: VectorData[Optional[NDArray[Any, str]]] = Field( tags: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""User-defined tags that identify or categorize events.""", description="""User-defined tags that identify or categorize events.""",
json_schema_extra={ json_schema_extra={
@ -136,7 +172,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags_index: Named[Optional[VectorIndex]] = Field( tags_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for tags.""", description="""Index for tags.""",
json_schema_extra={ json_schema_extra={
@ -151,7 +187,7 @@ class TimeIntervals(DynamicTable):
timeseries: Optional[TimeIntervalsTimeseries] = Field( timeseries: Optional[TimeIntervalsTimeseries] = Field(
None, description="""An index into a TimeSeries object.""" None, description="""An index into a TimeSeries object."""
) )
timeseries_index: Named[Optional[VectorIndex]] = Field( timeseries_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for timeseries.""", description="""Index for timeseries.""",
json_schema_extra={ json_schema_extra={
@ -168,14 +204,11 @@ class TimeIntervals(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -24,7 +24,12 @@ from ...core.v2_2_0.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_2_0.core_nwb_misc import Units from ...core.v2_2_0.core_nwb_misc import Units
from ...core.v2_2_0.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_2_0.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_2_0.core_nwb_ophys import ImagingPlane from ...core.v2_2_0.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_0.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -35,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -54,6 +59,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -464,7 +500,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( rel_x: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""x coordinate in electrode group""", description="""x coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -473,7 +509,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( rel_y: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""y coordinate in electrode group""", description="""y coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -482,7 +518,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( rel_z: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""z coordinate in electrode group""", description="""z coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -491,7 +527,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
reference: VectorData[Optional[NDArray[Any, str]]] = Field( reference: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""Description of the reference used for this electrode.""", description="""Description of the reference used for this electrode.""",
json_schema_extra={ json_schema_extra={
@ -505,14 +541,11 @@ class ExtracellularEphysElectrodes(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -26,7 +26,12 @@ from ...core.v2_2_0.core_nwb_base import (
TimeSeriesSync, TimeSeriesSync,
) )
from ...core.v2_2_0.core_nwb_device import Device from ...core.v2_2_0.core_nwb_device import Device
from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_0.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -37,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +61,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -897,14 +933,11 @@ class SweepTable(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -84,17 +115,16 @@ class GrayscaleImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBImage(Image): class RGBImage(Image):
@ -107,17 +137,24 @@ class RGBImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b", "exact_cardinality": 3},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBAImage(Image): class RGBAImage(Image):
@ -130,17 +167,24 @@ class RGBAImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b_a", "exact_cardinality": 4},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImageSeries(TimeSeries): class ImageSeries(TimeSeries):

View file

@ -24,6 +24,7 @@ from ...core.v2_2_0.core_nwb_ecephys import ElectrodeGroup
from ...hdmf_common.v1_1_0.hdmf_common_table import ( from ...hdmf_common.v1_1_0.hdmf_common_table import (
DynamicTable, DynamicTable,
DynamicTableRegion, DynamicTableRegion,
ElementIdentifiers,
VectorData, VectorData,
VectorIndex, VectorIndex,
) )
@ -37,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
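Every regenerated `ConfiguredBaseModel` now carries these two validators, alongside the switch from `extra="forbid"` to `extra="allow"` that `coerce_subclass` relies on via `__pydantic_extra__`. A self-contained sketch of the behaviour, using hypothetical `Container`/`Child`/`Holder` models rather than the generated NWB classes:

```python
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, field_validator


class Container(BaseModel):
    """Hypothetical parent class that wraps its payload in ``value``."""

    model_config = ConfigDict(extra="allow")
    value: Optional[int] = None


class Child(Container):
    """Hypothetical subclass that a field annotation asks for."""


class Holder(BaseModel):
    model_config = ConfigDict(extra="allow")
    number: Optional[int] = None
    child: Optional[Child] = None

    @field_validator("*", mode="wrap")
    @classmethod
    def coerce_value(cls, v: Any, handler) -> Any:
        """Fall back to the ``value`` attribute/key when direct validation fails."""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler(v.value)
            except AttributeError:
                try:
                    return handler(v["value"])
                except (IndexError, KeyError, TypeError):
                    raise e1

    @field_validator("*", mode="before")
    @classmethod
    def coerce_subclass(cls, v: Any, info) -> Any:
        """Recast a parent-class instance into the annotated child class."""
        if isinstance(v, BaseModel):
            annotation = cls.model_fields[info.field_name].annotation
            while hasattr(annotation, "__args__"):
                annotation = annotation.__args__[0]
            try:
                if issubclass(annotation, type(v)) and annotation is not type(v):
                    v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
            except TypeError:
                # annotation is a non-class type like a TypeVar
                pass
        return v


holder = Holder(number=Container(value=5), child=Container(value=1))
assert holder.number == 5            # rescued from Container.value by coerce_value
assert type(holder.child) is Child   # recast from Container by coerce_subclass
```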
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -443,14 +475,11 @@ class DecompositionSeriesBands(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
@ -466,7 +495,7 @@ class Units(DynamicTable):
) )
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
spike_times_index: Named[Optional[VectorIndex]] = Field( spike_times_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the spike_times dataset.""", description="""Index into the spike_times dataset.""",
json_schema_extra={ json_schema_extra={
@ -481,7 +510,7 @@ class Units(DynamicTable):
spike_times: Optional[UnitsSpikeTimes] = Field( spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit.""" None, description="""Spike times for each unit."""
) )
obs_intervals_index: Named[Optional[VectorIndex]] = Field( obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the obs_intervals dataset.""", description="""Index into the obs_intervals dataset.""",
json_schema_extra={ json_schema_extra={
@ -493,7 +522,7 @@ class Units(DynamicTable):
} }
}, },
) )
obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( obs_intervals: Optional[VectorData[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
Field( Field(
None, None,
description="""Observation intervals for each unit.""", description="""Observation intervals for each unit.""",
@ -509,7 +538,7 @@ class Units(DynamicTable):
}, },
) )
) )
electrodes_index: Named[Optional[VectorIndex]] = Field( electrodes_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into electrodes.""", description="""Index into electrodes.""",
json_schema_extra={ json_schema_extra={
@ -521,7 +550,7 @@ class Units(DynamicTable):
} }
}, },
) )
electrodes: Named[Optional[DynamicTableRegion]] = Field( electrodes: Optional[Named[DynamicTableRegion]] = Field(
None, None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={ json_schema_extra={
@ -536,16 +565,16 @@ class Units(DynamicTable):
electrode_group: Optional[List[ElectrodeGroup]] = Field( electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from.""" None, description="""Electrode group that each spike unit came from."""
) )
waveform_mean: VectorData[ waveform_mean: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
] ]
] ]
] = Field(None, description="""Spike waveform mean for each spike unit.""") ] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: VectorData[ waveform_sd: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
@ -557,14 +586,11 @@ class Units(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
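Throughout these tables the optionality marker moves outside the wrapper (`Named[Optional[X]]` becomes `Optional[Named[X]]`, `VectorData[Optional[...]]` becomes `Optional[VectorData[...]]`), the explicit `vector_data` slot is dropped, and `id` becomes an `ElementIdentifiers`. Presumably the reordering lets an absent value stay `None` without passing through the wrapper's validation. A generic illustration with a hypothetical `must_not_be_none` wrapper, not the project's actual `Named` or `VectorData` types:

```python
from typing import Annotated, Any, Optional

from pydantic import BaseModel, BeforeValidator, ValidationError


def must_not_be_none(v: Any) -> Any:
    """Hypothetical wrapper validator that cannot handle None."""
    if v is None:
        raise ValueError("wrapper received None")
    return v


class WrapperOutside(BaseModel):
    # wrapper applied outside Optional: the validator also sees an explicit None
    field: Annotated[Optional[int], BeforeValidator(must_not_be_none)] = None


class OptionalOutside(BaseModel):
    # Optional applied outside the wrapper: None bypasses the wrapper entirely
    field: Optional[Annotated[int, BeforeValidator(must_not_be_none)]] = None


OptionalOutside(field=None)      # accepted
try:
    WrapperOutside(field=None)   # rejected by the wrapper
except ValidationError as err:
    print(err)
```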
View file
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -58,6 +58,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +50,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -166,17 +197,16 @@ class RetinotopyImage(GrayscaleImage):
) )
field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImagingRetinotopy(NWBDataInterface): class ImagingRetinotopy(NWBDataInterface):
@ -204,7 +234,7 @@ class ImagingRetinotopy(NWBDataInterface):
} }
}, },
) )
axis_1_power_map: Named[Optional[AxisMap]] = Field( axis_1_power_map: Optional[Named[AxisMap]] = Field(
None, None,
description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""",
json_schema_extra={ json_schema_extra={
@ -228,7 +258,7 @@ class ImagingRetinotopy(NWBDataInterface):
} }
}, },
) )
axis_2_power_map: Named[Optional[AxisMap]] = Field( axis_2_power_map: Optional[Named[AxisMap]] = Field(
None, None,
description="""Power response to stimulus on the second measured axis.""", description="""Power response to stimulus on the second measured axis.""",
json_schema_extra={ json_schema_extra={
@ -306,17 +336,16 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
) )
field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
# Model rebuild # Model rebuild
View file
@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -168,6 +168,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -40,6 +40,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -57,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -20,7 +20,12 @@ from pydantic import (
) )
from ...core.v2_2_1.core_nwb_base import TimeSeries from ...core.v2_2_1.core_nwb_base import TimeSeries
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_2.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -31,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +55,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -127,7 +163,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags: VectorData[Optional[NDArray[Any, str]]] = Field( tags: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""User-defined tags that identify or categorize events.""", description="""User-defined tags that identify or categorize events.""",
json_schema_extra={ json_schema_extra={
@ -136,7 +172,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags_index: Named[Optional[VectorIndex]] = Field( tags_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for tags.""", description="""Index for tags.""",
json_schema_extra={ json_schema_extra={
@ -151,7 +187,7 @@ class TimeIntervals(DynamicTable):
timeseries: Optional[TimeIntervalsTimeseries] = Field( timeseries: Optional[TimeIntervalsTimeseries] = Field(
None, description="""An index into a TimeSeries object.""" None, description="""An index into a TimeSeries object."""
) )
timeseries_index: Named[Optional[VectorIndex]] = Field( timeseries_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for timeseries.""", description="""Index for timeseries.""",
json_schema_extra={ json_schema_extra={
@ -168,14 +204,11 @@ class TimeIntervals(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
View file
@ -24,7 +24,12 @@ from ...core.v2_2_1.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_2_1.core_nwb_misc import Units from ...core.v2_2_1.core_nwb_misc import Units
from ...core.v2_2_1.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_2_1.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_2_1.core_nwb_ophys import ImagingPlane from ...core.v2_2_1.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_2.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -35,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -54,6 +59,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -464,7 +500,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( rel_x: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""x coordinate in electrode group""", description="""x coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -473,7 +509,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( rel_y: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""y coordinate in electrode group""", description="""y coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -482,7 +518,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( rel_z: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""z coordinate in electrode group""", description="""z coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -491,7 +527,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
reference: VectorData[Optional[NDArray[Any, str]]] = Field( reference: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""Description of the reference used for this electrode.""", description="""Description of the reference used for this electrode.""",
json_schema_extra={ json_schema_extra={
@ -505,14 +541,11 @@ class ExtracellularEphysElectrodes(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
View file
@ -26,7 +26,12 @@ from ...core.v2_2_1.core_nwb_base import (
TimeSeriesSync, TimeSeriesSync,
) )
from ...core.v2_2_1.core_nwb_device import Device from ...core.v2_2_1.core_nwb_device import Device
from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_2.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -37,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +61,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -897,14 +933,11 @@ class SweepTable(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
View file
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -84,17 +115,16 @@ class GrayscaleImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBImage(Image): class RGBImage(Image):
@ -107,17 +137,24 @@ class RGBImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b", "exact_cardinality": 3},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBAImage(Image): class RGBAImage(Image):
@ -130,17 +167,24 @@ class RGBAImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b_a", "exact_cardinality": 4},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImageSeries(TimeSeries): class ImageSeries(TimeSeries):
View file
@ -24,6 +24,7 @@ from ...core.v2_2_1.core_nwb_ecephys import ElectrodeGroup
from ...hdmf_common.v1_1_2.hdmf_common_table import ( from ...hdmf_common.v1_1_2.hdmf_common_table import (
DynamicTable, DynamicTable,
DynamicTableRegion, DynamicTableRegion,
ElementIdentifiers,
VectorData, VectorData,
VectorIndex, VectorIndex,
) )
@ -37,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -443,14 +475,11 @@ class DecompositionSeriesBands(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
@ -466,7 +495,7 @@ class Units(DynamicTable):
) )
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
spike_times_index: Named[Optional[VectorIndex]] = Field( spike_times_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the spike_times dataset.""", description="""Index into the spike_times dataset.""",
json_schema_extra={ json_schema_extra={
@ -481,7 +510,7 @@ class Units(DynamicTable):
spike_times: Optional[UnitsSpikeTimes] = Field( spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit.""" None, description="""Spike times for each unit."""
) )
obs_intervals_index: Named[Optional[VectorIndex]] = Field( obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the obs_intervals dataset.""", description="""Index into the obs_intervals dataset.""",
json_schema_extra={ json_schema_extra={
@ -493,7 +522,7 @@ class Units(DynamicTable):
} }
}, },
) )
obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( obs_intervals: Optional[VectorData[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
Field( Field(
None, None,
description="""Observation intervals for each unit.""", description="""Observation intervals for each unit.""",
@ -509,7 +538,7 @@ class Units(DynamicTable):
}, },
) )
) )
electrodes_index: Named[Optional[VectorIndex]] = Field( electrodes_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into electrodes.""", description="""Index into electrodes.""",
json_schema_extra={ json_schema_extra={
@ -521,7 +550,7 @@ class Units(DynamicTable):
} }
}, },
) )
electrodes: Named[Optional[DynamicTableRegion]] = Field( electrodes: Optional[Named[DynamicTableRegion]] = Field(
None, None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={ json_schema_extra={
@ -536,16 +565,16 @@ class Units(DynamicTable):
electrode_group: Optional[List[ElectrodeGroup]] = Field( electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from.""" None, description="""Electrode group that each spike unit came from."""
) )
waveform_mean: VectorData[ waveform_mean: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
] ]
] ]
] = Field(None, description="""Spike waveform mean for each spike unit.""") ] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: VectorData[ waveform_sd: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
@ -557,14 +586,11 @@ class Units(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
View file
@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -58,6 +58,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -31,7 +31,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +50,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -166,17 +197,16 @@ class RetinotopyImage(GrayscaleImage):
) )
field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImagingRetinotopy(NWBDataInterface): class ImagingRetinotopy(NWBDataInterface):
@ -204,7 +234,7 @@ class ImagingRetinotopy(NWBDataInterface):
} }
}, },
) )
axis_1_power_map: Named[Optional[AxisMap]] = Field( axis_1_power_map: Optional[Named[AxisMap]] = Field(
None, None,
description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""",
json_schema_extra={ json_schema_extra={
@ -228,7 +258,7 @@ class ImagingRetinotopy(NWBDataInterface):
} }
}, },
) )
axis_2_power_map: Named[Optional[AxisMap]] = Field( axis_2_power_map: Optional[Named[AxisMap]] = Field(
None, None,
description="""Power response to stimulus on the second measured axis.""", description="""Power response to stimulus on the second measured axis.""",
json_schema_extra={ json_schema_extra={
@ -306,17 +336,16 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
) )
field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""")
format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""")
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
# Model rebuild # Model rebuild
View file
@ -149,7 +149,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -168,6 +168,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
View file
@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -40,6 +40,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -57,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -20,7 +20,12 @@ from pydantic import (
) )
from ...core.v2_2_2.core_nwb_base import TimeSeries from ...core.v2_2_2.core_nwb_base import TimeSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -31,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +55,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -127,7 +163,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags: VectorData[Optional[NDArray[Any, str]]] = Field( tags: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""User-defined tags that identify or categorize events.""", description="""User-defined tags that identify or categorize events.""",
json_schema_extra={ json_schema_extra={
@ -136,7 +172,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags_index: Named[Optional[VectorIndex]] = Field( tags_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for tags.""", description="""Index for tags.""",
json_schema_extra={ json_schema_extra={
@ -151,7 +187,7 @@ class TimeIntervals(DynamicTable):
timeseries: Optional[TimeIntervalsTimeseries] = Field( timeseries: Optional[TimeIntervalsTimeseries] = Field(
None, description="""An index into a TimeSeries object.""" None, description="""An index into a TimeSeries object."""
) )
timeseries_index: Named[Optional[VectorIndex]] = Field( timeseries_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for timeseries.""", description="""Index for timeseries.""",
json_schema_extra={ json_schema_extra={
@ -168,14 +204,11 @@ class TimeIntervals(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
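This hunk shows the two DynamicTable-wide changes repeated throughout the generated models: the id column is now typed as ElementIdentifiers rather than a generic VectorData slot, and the catch-all vector_data list is gone because, with extra="allow", additional columns simply ride along as extra attributes. Below is a toy sketch of the resulting construction pattern; the classes are hypothetical stand-ins, not the hdmf-common models.

from typing import List

from pydantic import BaseModel, ConfigDict


class ElementIdentifiersSketch(BaseModel):
    """Stand-in for hdmf-common ElementIdentifiers."""

    model_config = ConfigDict(extra="allow")
    name: str = "id"
    value: List[int] = []


class VectorDataSketch(BaseModel):
    """Stand-in for an hdmf-common VectorData column."""

    model_config = ConfigDict(extra="allow")
    name: str
    value: List[float] = []


class DynamicTableSketch(BaseModel):
    model_config = ConfigDict(extra="allow")  # mirrors the extra="forbid" -> "allow" change
    name: str
    description: str
    id: ElementIdentifiersSketch              # mirrors id: ElementIdentifiers


table = DynamicTableSketch(
    name="electrodes",
    description="toy electrode table",
    id=ElementIdentifiersSketch(value=[0, 1, 2]),
    # extra column: no declared vector_data slot is needed to carry it
    rel_x=VectorDataSketch(name="rel_x", value=[0.1, 0.2, 0.3]),
)
assert table.rel_x.value[1] == 0.2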

View file

@ -24,7 +24,12 @@ from ...core.v2_2_2.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_2_2.core_nwb_misc import Units from ...core.v2_2_2.core_nwb_misc import Units
from ...core.v2_2_2.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_2_2.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_2_2.core_nwb_ophys import ImagingPlane from ...core.v2_2_2.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -35,7 +40,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -54,6 +59,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -464,7 +500,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( rel_x: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""x coordinate in electrode group""", description="""x coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -473,7 +509,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( rel_y: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""y coordinate in electrode group""", description="""y coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -482,7 +518,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( rel_z: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""z coordinate in electrode group""", description="""z coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -491,7 +527,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
reference: VectorData[Optional[NDArray[Any, str]]] = Field( reference: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""Description of the reference used for this electrode.""", description="""Description of the reference used for this electrode.""",
json_schema_extra={ json_schema_extra={
@ -505,14 +541,11 @@ class ExtracellularEphysElectrodes(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -26,7 +26,12 @@ from ...core.v2_2_2.core_nwb_base import (
TimeSeriesSync, TimeSeriesSync,
) )
from ...core.v2_2_2.core_nwb_device import Device from ...core.v2_2_2.core_nwb_device import Device
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -37,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +61,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -897,14 +933,11 @@ class SweepTable(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -84,17 +115,16 @@ class GrayscaleImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBImage(Image): class RGBImage(Image):
@ -107,17 +137,24 @@ class RGBImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b", "exact_cardinality": 3},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBAImage(Image): class RGBAImage(Image):
@ -130,17 +167,24 @@ class RGBAImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b_a", "exact_cardinality": 4},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
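Across the Image subclasses the per-class value arrays (2-D, x-y-3, x-y-4) are replaced with a single union of all three shapes, matching the field now inherited from the Image base; one visible consequence is that each subclass will validate any of the three shapes. A small sketch of what the widened field accepts, using a hypothetical ImageSketch class and assuming the numpydantic NDArray/Shape types used by the generated models:

from typing import Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class ImageSketch(BaseModel):
    name: str
    value: Optional[
        Union[
            NDArray[Shape["* x, * y"], float],
            NDArray[Shape["* x, * y, 3 r_g_b"], float],
            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
        ]
    ] = None


gray = ImageSketch(name="gray", value=np.zeros((4, 4)))      # 2-D grayscale
rgb = ImageSketch(name="rgb", value=np.zeros((4, 4, 3)))     # RGB
rgba = ImageSketch(name="rgba", value=np.zeros((4, 4, 4)))   # RGBA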
class ImageSeries(TimeSeries): class ImageSeries(TimeSeries):

View file

@ -24,6 +24,7 @@ from ...core.v2_2_2.core_nwb_ecephys import ElectrodeGroup
from ...hdmf_common.v1_1_3.hdmf_common_table import ( from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable, DynamicTable,
DynamicTableRegion, DynamicTableRegion,
ElementIdentifiers,
VectorData, VectorData,
VectorIndex, VectorIndex,
) )
@ -37,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -443,14 +475,11 @@ class DecompositionSeriesBands(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
@ -466,7 +495,7 @@ class Units(DynamicTable):
) )
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
spike_times_index: Named[Optional[VectorIndex]] = Field( spike_times_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the spike_times dataset.""", description="""Index into the spike_times dataset.""",
json_schema_extra={ json_schema_extra={
@ -481,7 +510,7 @@ class Units(DynamicTable):
spike_times: Optional[UnitsSpikeTimes] = Field( spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit.""" None, description="""Spike times for each unit."""
) )
obs_intervals_index: Named[Optional[VectorIndex]] = Field( obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the obs_intervals dataset.""", description="""Index into the obs_intervals dataset.""",
json_schema_extra={ json_schema_extra={
@ -493,7 +522,7 @@ class Units(DynamicTable):
} }
}, },
) )
obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( obs_intervals: Optional[VectorData[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
Field( Field(
None, None,
description="""Observation intervals for each unit.""", description="""Observation intervals for each unit.""",
@ -509,7 +538,7 @@ class Units(DynamicTable):
}, },
) )
) )
electrodes_index: Named[Optional[VectorIndex]] = Field( electrodes_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into electrodes.""", description="""Index into electrodes.""",
json_schema_extra={ json_schema_extra={
@ -521,7 +550,7 @@ class Units(DynamicTable):
} }
}, },
) )
electrodes: Named[Optional[DynamicTableRegion]] = Field( electrodes: Optional[Named[DynamicTableRegion]] = Field(
None, None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={ json_schema_extra={
@ -536,16 +565,16 @@ class Units(DynamicTable):
electrode_group: Optional[List[ElectrodeGroup]] = Field( electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from.""" None, description="""Electrode group that each spike unit came from."""
) )
waveform_mean: VectorData[ waveform_mean: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
] ]
] ]
] = Field(None, description="""Spike waveform mean for each spike unit.""") ] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: VectorData[ waveform_sd: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
@ -557,14 +586,11 @@ class Units(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
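The same annotation reordering runs through this file: Optional now wraps the whole column (Optional[VectorData[...]], Optional[Named[...]]) instead of sitting inside the generic parameter, so an absent column is plain None rather than a VectorData whose payload is None. A toy generic model, not the hdmf-common VectorData, to illustrate the difference:

from typing import Generic, List, Optional, TypeVar

from pydantic import BaseModel

T = TypeVar("T")


class ColumnSketch(BaseModel, Generic[T]):
    """Stand-in for a generic VectorData-like column."""

    name: str = "col"
    value: T


class UnitsSketch(BaseModel):
    # before: VectorData[Optional[...]]  (column always present, payload optional)
    # after:  Optional[VectorData[...]]  (the column itself may be absent)
    obs_intervals: Optional[ColumnSketch[List[float]]] = None


assert UnitsSketch().obs_intervals is None
with_col = UnitsSketch(obs_intervals={"value": [0.0, 1.5]})
assert with_col.obs_intervals.value == [0.0, 1.5]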

View file

@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -39,7 +39,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -58,6 +58,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -152,7 +152,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -171,6 +171,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -40,6 +40,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -57,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@ -20,7 +20,12 @@ from pydantic import (
) )
from ...core.v2_2_4.core_nwb_base import TimeSeries from ...core.v2_2_4.core_nwb_base import TimeSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -31,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -50,6 +55,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -127,7 +163,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags: VectorData[Optional[NDArray[Any, str]]] = Field( tags: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""User-defined tags that identify or categorize events.""", description="""User-defined tags that identify or categorize events.""",
json_schema_extra={ json_schema_extra={
@ -136,7 +172,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags_index: Named[Optional[VectorIndex]] = Field( tags_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for tags.""", description="""Index for tags.""",
json_schema_extra={ json_schema_extra={
@ -151,7 +187,7 @@ class TimeIntervals(DynamicTable):
timeseries: Optional[TimeIntervalsTimeseries] = Field( timeseries: Optional[TimeIntervalsTimeseries] = Field(
None, description="""An index into a TimeSeries object.""" None, description="""An index into a TimeSeries object."""
) )
timeseries_index: Named[Optional[VectorIndex]] = Field( timeseries_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for timeseries.""", description="""Index for timeseries.""",
json_schema_extra={ json_schema_extra={
@ -168,14 +204,11 @@ class TimeIntervals(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -25,7 +25,12 @@ from ...core.v2_2_4.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_2_4.core_nwb_misc import Units from ...core.v2_2_4.core_nwb_misc import Units
from ...core.v2_2_4.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_2_4.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_2_4.core_nwb_ophys import ImagingPlane from ...core.v2_2_4.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -36,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -55,6 +60,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -440,7 +476,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( rel_x: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""x coordinate in electrode group""", description="""x coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -449,7 +485,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( rel_y: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""y coordinate in electrode group""", description="""y coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -458,7 +494,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( rel_z: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""z coordinate in electrode group""", description="""z coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@ -467,7 +503,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
reference: VectorData[Optional[NDArray[Any, str]]] = Field( reference: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""Description of the reference used for this electrode.""", description="""Description of the reference used for this electrode.""",
json_schema_extra={ json_schema_extra={
@ -481,14 +517,11 @@ class ExtracellularEphysElectrodes(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -26,7 +26,12 @@ from ...core.v2_2_4.core_nwb_base import (
TimeSeriesSync, TimeSeriesSync,
) )
from ...core.v2_2_4.core_nwb_device import Device from ...core.v2_2_4.core_nwb_device import Device
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@ -37,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -56,6 +61,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@ -897,14 +933,11 @@ class SweepTable(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -84,17 +115,16 @@ class GrayscaleImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBImage(Image): class RGBImage(Image):
@@ -107,17 +137,24 @@ class RGBImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b", "exact_cardinality": 3},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBAImage(Image): class RGBAImage(Image):
@@ -130,17 +167,24 @@ class RGBAImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b_a", "exact_cardinality": 4},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImageSeries(TimeSeries): class ImageSeries(TimeSeries):

View file
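
Editor's note: the companion `coerce_subclass` before-validator in these hunks recasts a parent-class instance into the subclass declared on the field, folding any extra attributes back in via `__pydantic_extra__`. A rough standalone sketch under the same `extra="allow"` config follows; `Parent`, `Child`, and `Holder` are illustrative names only, not generated model classes.

```python
from typing import Any

from pydantic import BaseModel, ConfigDict, field_validator


class ConfiguredBase(BaseModel):
    model_config = ConfigDict(extra="allow")

    @field_validator("*", mode="before")
    @classmethod
    def coerce_subclass(cls, v: Any, info) -> Any:
        """Rebuild a parent-class value as the child class the field is annotated with."""
        if isinstance(v, BaseModel):
            annotation = cls.model_fields[info.field_name].annotation
            while hasattr(annotation, "__args__"):  # unwrap Optional[...] and friends
                annotation = annotation.__args__[0]
            try:
                if issubclass(annotation, type(v)) and annotation is not type(v):
                    v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
            except TypeError:
                pass  # annotation is a non-class type such as a TypeVar
        return v


class Parent(ConfiguredBase):
    name: str


class Child(Parent):
    units: str = "seconds"


class Holder(ConfiguredBase):
    item: Child


# A Parent instance (with an undeclared extra column) is recast into Child.
h = Holder(item=Parent(name="spike_times", extra_col=[1, 2, 3]))
print(type(h.item).__name__, h.item.units)  # Child seconds
```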

@@ -24,6 +24,7 @@ from ...core.v2_2_4.core_nwb_ecephys import ElectrodeGroup
from ...hdmf_common.v1_1_3.hdmf_common_table import ( from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable, DynamicTable,
DynamicTableRegion, DynamicTableRegion,
ElementIdentifiers,
VectorData, VectorData,
VectorIndex, VectorIndex,
) )
@@ -37,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -56,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -443,14 +475,11 @@ class DecompositionSeriesBands(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )
@@ -466,7 +495,7 @@ class Units(DynamicTable):
) )
name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}}) name: str = Field("Units", json_schema_extra={"linkml_meta": {"ifabsent": "string(Units)"}})
spike_times_index: Named[Optional[VectorIndex]] = Field( spike_times_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the spike_times dataset.""", description="""Index into the spike_times dataset.""",
json_schema_extra={ json_schema_extra={
@@ -481,7 +510,7 @@ class Units(DynamicTable):
spike_times: Optional[UnitsSpikeTimes] = Field( spike_times: Optional[UnitsSpikeTimes] = Field(
None, description="""Spike times for each unit.""" None, description="""Spike times for each unit."""
) )
obs_intervals_index: Named[Optional[VectorIndex]] = Field( obs_intervals_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into the obs_intervals dataset.""", description="""Index into the obs_intervals dataset.""",
json_schema_extra={ json_schema_extra={
@@ -493,7 +522,7 @@ class Units(DynamicTable):
} }
}, },
) )
obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( obs_intervals: Optional[VectorData[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = (
Field( Field(
None, None,
description="""Observation intervals for each unit.""", description="""Observation intervals for each unit.""",
@@ -509,7 +538,7 @@ class Units(DynamicTable):
}, },
) )
) )
electrodes_index: Named[Optional[VectorIndex]] = Field( electrodes_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into electrodes.""", description="""Index into electrodes.""",
json_schema_extra={ json_schema_extra={
@@ -521,7 +550,7 @@ class Units(DynamicTable):
} }
}, },
) )
electrodes: Named[Optional[DynamicTableRegion]] = Field( electrodes: Optional[Named[DynamicTableRegion]] = Field(
None, None,
description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""",
json_schema_extra={ json_schema_extra={
@@ -536,16 +565,16 @@ class Units(DynamicTable):
electrode_group: Optional[List[ElectrodeGroup]] = Field( electrode_group: Optional[List[ElectrodeGroup]] = Field(
None, description="""Electrode group that each spike unit came from.""" None, description="""Electrode group that each spike unit came from."""
) )
waveform_mean: VectorData[ waveform_mean: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
] ]
] ]
] = Field(None, description="""Spike waveform mean for each spike unit.""") ] = Field(None, description="""Spike waveform mean for each spike unit.""")
waveform_sd: VectorData[ waveform_sd: Optional[
Optional[ VectorData[
Union[ Union[
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples"], float],
NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
@@ -557,14 +586,11 @@ class Units(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file
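
Editor's note: the other recurring change in these hunks is the model config switching from `extra="forbid"` to `extra="allow"`, so undeclared keys are kept in `__pydantic_extra__` (which `coerce_subclass` above relies on) instead of raising a validation error. A small sketch of the effect, with a hypothetical `Row` model:

```python
from pydantic import BaseModel, ConfigDict


class Row(BaseModel):
    model_config = ConfigDict(extra="allow")

    name: str


# With extra="allow", the undeclared column is preserved rather than rejected.
r = Row(name="trial_1", start_time=0.5)
print(r.__pydantic_extra__)  # {'start_time': 0.5}
```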

@@ -31,6 +31,7 @@ from ...core.v2_2_4.core_nwb_image import ImageSeries, ImageSeriesExternalFile
from ...hdmf_common.v1_1_3.hdmf_common_table import ( from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable, DynamicTable,
DynamicTableRegion, DynamicTableRegion,
ElementIdentifiers,
VectorData, VectorData,
VectorIndex, VectorIndex,
) )
@@ -44,7 +45,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -63,6 +64,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -322,7 +354,7 @@ class PlaneSegmentation(DynamicTable):
None, None,
description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""",
) )
pixel_mask_index: Named[Optional[VectorIndex]] = Field( pixel_mask_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into pixel_mask.""", description="""Index into pixel_mask.""",
json_schema_extra={ json_schema_extra={
@@ -338,7 +370,7 @@ class PlaneSegmentation(DynamicTable):
None, None,
description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""",
) )
voxel_mask_index: Named[Optional[VectorIndex]] = Field( voxel_mask_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index into voxel_mask.""", description="""Index into voxel_mask.""",
json_schema_extra={ json_schema_extra={
@@ -373,14 +405,11 @@ class PlaneSegmentation(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -159,7 +159,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -178,6 +178,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -28,7 +28,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -47,6 +47,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -21,7 +21,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -40,6 +40,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -38,7 +38,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -57,6 +57,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}

View file

@@ -20,7 +20,12 @@ from pydantic import (
) )
from ...core.v2_2_5.core_nwb_base import TimeSeries from ...core.v2_2_5.core_nwb_base import TimeSeries
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@@ -31,7 +36,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -50,6 +55,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -127,7 +163,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags: VectorData[Optional[NDArray[Any, str]]] = Field( tags: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""User-defined tags that identify or categorize events.""", description="""User-defined tags that identify or categorize events.""",
json_schema_extra={ json_schema_extra={
@@ -136,7 +172,7 @@ class TimeIntervals(DynamicTable):
} }
}, },
) )
tags_index: Named[Optional[VectorIndex]] = Field( tags_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for tags.""", description="""Index for tags.""",
json_schema_extra={ json_schema_extra={
@@ -151,7 +187,7 @@ class TimeIntervals(DynamicTable):
timeseries: Optional[TimeIntervalsTimeseries] = Field( timeseries: Optional[TimeIntervalsTimeseries] = Field(
None, description="""An index into a TimeSeries object.""" None, description="""An index into a TimeSeries object."""
) )
timeseries_index: Named[Optional[VectorIndex]] = Field( timeseries_index: Optional[Named[VectorIndex]] = Field(
None, None,
description="""Index for timeseries.""", description="""Index for timeseries.""",
json_schema_extra={ json_schema_extra={
@@ -168,14 +204,11 @@ class TimeIntervals(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@@ -25,7 +25,12 @@ from ...core.v2_2_5.core_nwb_icephys import IntracellularElectrode, SweepTable
from ...core.v2_2_5.core_nwb_misc import Units from ...core.v2_2_5.core_nwb_misc import Units
from ...core.v2_2_5.core_nwb_ogen import OptogeneticStimulusSite from ...core.v2_2_5.core_nwb_ogen import OptogeneticStimulusSite
from ...core.v2_2_5.core_nwb_ophys import ImagingPlane from ...core.v2_2_5.core_nwb_ophys import ImagingPlane
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@@ -36,7 +41,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -55,6 +60,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -440,7 +476,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( rel_x: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""x coordinate in electrode group""", description="""x coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@@ -449,7 +485,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( rel_y: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""y coordinate in electrode group""", description="""y coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@@ -458,7 +494,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( rel_z: Optional[VectorData[NDArray[Any, float]]] = Field(
None, None,
description="""z coordinate in electrode group""", description="""z coordinate in electrode group""",
json_schema_extra={ json_schema_extra={
@@ -467,7 +503,7 @@ class ExtracellularEphysElectrodes(DynamicTable):
} }
}, },
) )
reference: VectorData[Optional[NDArray[Any, str]]] = Field( reference: Optional[VectorData[NDArray[Any, str]]] = Field(
None, None,
description="""Description of the reference used for this electrode.""", description="""Description of the reference used for this electrode.""",
json_schema_extra={ json_schema_extra={
@@ -481,14 +517,11 @@ class ExtracellularEphysElectrodes(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@@ -26,7 +26,12 @@ from ...core.v2_2_5.core_nwb_base import (
TimeSeriesSync, TimeSeriesSync,
) )
from ...core.v2_2_5.core_nwb_device import Device from ...core.v2_2_5.core_nwb_device import Device
from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...hdmf_common.v1_1_3.hdmf_common_table import (
DynamicTable,
ElementIdentifiers,
VectorData,
VectorIndex,
)
metamodel_version = "None" metamodel_version = "None"
@@ -37,7 +42,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -56,6 +61,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -897,14 +933,11 @@ class SweepTable(DynamicTable):
description="""The names of the columns in this table. This should be used to specify an order to the columns.""", description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
) )
description: str = Field(..., description="""Description of what is in this dynamic table.""") description: str = Field(..., description="""Description of what is in this dynamic table.""")
id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( id: ElementIdentifiers = Field(
..., ...,
description="""Array of unique identifiers for the rows of this dynamic table.""", description="""Array of unique identifiers for the rows of this dynamic table.""",
json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
) )
vector_data: Optional[List[VectorData]] = Field(
None, description="""Vector columns of this dynamic table."""
)
vector_index: Optional[List[VectorIndex]] = Field( vector_index: Optional[List[VectorIndex]] = Field(
None, description="""Indices for the vector columns of this dynamic table.""" None, description="""Indices for the vector columns of this dynamic table."""
) )

View file

@@ -22,7 +22,7 @@ class ConfiguredBaseModel(BaseModel):
model_config = ConfigDict( model_config = ConfigDict(
validate_assignment=True, validate_assignment=True,
validate_default=True, validate_default=True,
extra="forbid", extra="allow",
arbitrary_types_allowed=True, arbitrary_types_allowed=True,
use_enum_values=True, use_enum_values=True,
strict=False, strict=False,
@@ -41,6 +41,37 @@ class ConfiguredBaseModel(BaseModel):
else: else:
raise KeyError("No value or data field to index from") raise KeyError("No value or data field to index from")
@field_validator("*", mode="wrap")
@classmethod
def coerce_value(cls, v: Any, handler) -> Any:
"""Try to rescue instantiation by using the value field"""
try:
return handler(v)
except Exception as e1:
try:
return handler(v.value)
except AttributeError:
try:
return handler(v["value"])
except (IndexError, KeyError, TypeError):
raise e1
@field_validator("*", mode="before")
@classmethod
def coerce_subclass(cls, v: Any, info) -> Any:
"""Recast parent classes into child classes"""
if isinstance(v, BaseModel):
annotation = cls.model_fields[info.field_name].annotation
while hasattr(annotation, "__args__"):
annotation = annotation.__args__[0]
try:
if issubclass(annotation, type(v)) and annotation is not type(v):
v = annotation(**{**v.__dict__, **v.__pydantic_extra__})
except TypeError:
# fine, annotation is a non-class type like a TypeVar
pass
return v
class LinkMLMeta(RootModel): class LinkMLMeta(RootModel):
root: Dict[str, Any] = {} root: Dict[str, Any] = {}
@@ -84,17 +115,16 @@ class GrayscaleImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {"array": {"dimensions": [{"alias": "x"}, {"alias": "y"}]}}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBImage(Image): class RGBImage(Image):
@@ -107,17 +137,24 @@ class RGBImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 3 r_g_b"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b", "exact_cardinality": 3},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class RGBAImage(Image): class RGBAImage(Image):
@@ -130,17 +167,24 @@ class RGBAImage(Image):
) )
name: str = Field(...) name: str = Field(...)
value: Optional[NDArray[Shape["* x, * y, 4 r_g_b_a"], float]] = Field(
None,
json_schema_extra={
"linkml_meta": {
"array": {
"dimensions": [
{"alias": "x"},
{"alias": "y"},
{"alias": "r_g_b_a", "exact_cardinality": 4},
]
}
}
},
)
resolution: Optional[float] = Field( resolution: Optional[float] = Field(
None, description="""Pixel resolution of the image, in pixels per centimeter.""" None, description="""Pixel resolution of the image, in pixels per centimeter."""
) )
description: Optional[str] = Field(None, description="""Description of the image.""") description: Optional[str] = Field(None, description="""Description of the image.""")
value: Optional[
Union[
NDArray[Shape["* x, * y"], float],
NDArray[Shape["* x, * y, 3 r_g_b"], float],
NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
]
] = Field(None)
class ImageSeries(TimeSeries): class ImageSeries(TimeSeries):

Some files were not shown because too many files have changed in this diff.