some baseline tests, fixing imports, coverage reports

sneakers-the-rat 2023-09-05 21:47:41 -07:00
parent ccc09de400
commit da2eaea51e
29 changed files with 392 additions and 76 deletions


@@ -4,7 +4,7 @@ on:
  push:
jobs:
-  build:
+  test:
    strategy:
      matrix:
        python-version: [3.11]
@@ -20,7 +20,7 @@ jobs:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
-        run: pip install .[tests] pytest-md pytest-emoji
+        run: pip install .[tests]
        working-directory: nwb_linkml
      - uses: pavelzw/pytest-action@v2
@@ -29,3 +29,8 @@ jobs:
          emoji: true
          verbose: true
          job-summary: true
+      - working-directory: nwb_linkml
+        run: "coveralls --service=github"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
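The new step uploads the coverage data that `pytest --cov` writes earlier in the job. For a local look at the same data before it goes to coveralls, the coverage API can be used directly; a minimal sketch (the default `.coverage` data file location is an assumption, not something this workflow configures):

```python
# Minimal sketch: load the data file written by pytest-cov and print a
# terminal summary, roughly what the CI job summary and coveralls report show.
import coverage

cov = coverage.Coverage(data_file=".coverage")  # pytest-cov's default output, assumed
cov.load()    # read existing measurement data instead of starting a new run
cov.report()  # per-file coverage summary
```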

nwb_linkml/README.md (new file)

nwb_linkml/poetry.lock (generated)

@@ -212,6 +212,87 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "6.5.0"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
{file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
{file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
{file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
{file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
{file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
{file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
{file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
{file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
{file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
{file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
{file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
{file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
{file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
{file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
{file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
{file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
{file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
{file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
{file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
{file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
{file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
{file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
{file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
{file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
{file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
{file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
{file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
{file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
{file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
{file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
{file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
{file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
{file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
{file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
{file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
{file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
{file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
{file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
{file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
{file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
{file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
{file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
{file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
{file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
{file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
{file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
{file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
{file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
{file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
]
[package.extras]
toml = ["tomli"]
[[package]]
name = "coveralls"
version = "3.3.1"
description = "Show coverage stats online via coveralls.io"
optional = true
python-versions = ">= 3.5"
files = [
{file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"},
{file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"},
]
[package.dependencies]
coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0"
docopt = ">=0.6.1"
requests = ">=1.0.0"
[package.extras]
yaml = ["PyYAML (>=3.10)"]
[[package]]
name = "curies"
version = "0.6.0"
@@ -344,6 +425,16 @@ wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
[[package]]
name = "docopt"
version = "0.6.2"
description = "Pythonic argument parser, that will make you smile"
optional = true
python-versions = "*"
files = [
{file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
]
[[package]]
name = "et-xmlfile"
version = "1.1.0"
@@ -1372,6 +1463,24 @@ pluggy = ">=0.12,<2.0"
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "pytest-depends"
version = "1.0.1"
@@ -1389,6 +1498,20 @@ future-fstrings = "*"
networkx = "*"
pytest = ">=3"
[[package]]
name = "pytest-emoji"
version = "0.2.0"
description = "A pytest plugin that adds emojis to your test result report"
optional = false
python-versions = ">=3.4"
files = [
{file = "pytest-emoji-0.2.0.tar.gz", hash = "sha256:e1bd4790d87649c2d09c272c88bdfc4d37c1cc7c7a46583087d7c510944571e8"},
{file = "pytest_emoji-0.2.0-py3-none-any.whl", hash = "sha256:6e34ed21970fa4b80a56ad11417456bd873eb066c02315fe9df0fafe6d4d4436"},
]
[package.dependencies]
pytest = ">=4.2.1"
[[package]]
name = "pytest-logging"
version = "2015.11.4"
@@ -1402,6 +1525,20 @@ files = [
[package.dependencies]
pytest = ">=2.8.1"
[[package]]
name = "pytest-md"
version = "0.2.0"
description = "Plugin for generating Markdown reports for pytest results"
optional = false
python-versions = ">=3.6"
files = [
{file = "pytest-md-0.2.0.tar.gz", hash = "sha256:3b248d5b360ea5198e05b4f49c7442234812809a63137ec6cdd3643a40cf0112"},
{file = "pytest_md-0.2.0-py3-none-any.whl", hash = "sha256:4c4cd16fea6d1485e87ee254558712c804a96d2aa9674b780e7eb8fb6526e1d1"},
]
[package.dependencies]
pytest = ">=4.2.1"
[[package]]
name = "python-dateutil"
version = "2.8.2"
@@ -2186,9 +2323,9 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[extras]
dev = []
plot = ["dash", "dash-cytoscape"]
-tests = ["pytest", "pytest-depends"]
+tests = ["coverage", "pytest", "pytest-cov", "pytest-depends", "pytest-emoji", "pytest-md"]

[metadata]
lock-version = "2.0"
python-versions = "^3.11"
-content-hash = "4b3073d732a5ddcc84db255baea64e2df0414967995d20bd902f83a39de0bc10"
+content-hash = "2d93043f4c9a191ba4a48ffed4739d027cc9b10f26b1f882326d1e385af462cb"


@@ -17,17 +17,25 @@ linkml-runtime = "^1.5.6"
#nwb_schema_language = { path = './nwb_schema_language', develop = true, optional = true }
rich = "^13.5.2"
linkml = "^1.5.7"
-pytest = { version="^7.4.0", optional=true}
-pytest-depends = {version="^1.0.1", optional=true}
-dash = {version="^2.12.1", optional=true}
-dash-cytoscape = {version="^0.3.0", optional=true}
nptyping = "^2.5.0"
pydantic = "^2.3.0"
h5py = "^3.9.0"
+dash = {version="^2.12.1", optional=true}
+dash-cytoscape = {version="^0.3.0", optional=true}
+pytest = { version="^7.4.0", optional=true}
+pytest-depends = {version="^1.0.1", optional=true}
+coverage = {version = "^6.1.1", optional = true}
+pytest-md = {version = "^0.2.0", optional = true}
+pytest-emoji = {version="^0.2.0", optional = true}
+pytest-cov = {version = "^4.1.0", optional = true}
+coveralls = {version = "^3.3.1", optional = true}

[tool.poetry.extras]
dev = ["nwb_schema_language"]
-tests = ["pytest", "pytest-depends"]
+tests = [
+    "pytest", "pytest-depends", "coverage", "pytest-md",
+    "pytest-emoji", "pytest-cov"
+]
plot = ["dash", "dash-cytoscape"]
@@ -37,6 +45,10 @@ optional = true
[tool.poetry.group.tests.dependencies]
pytest = "^7.4.0"
pytest-depends = "^1.0.1"
+coverage = "^6.1.1"
+pytest-md = "^0.2.0"
+pytest-emoji = "^0.2.0"
+pytest-cov = "^4.1.0"

[tool.poetry.group.plot]
optional = true
@@ -51,3 +63,21 @@ build-backend = "poetry.core.masonry.api"
[tool.pytest.ini_options]
+addopts = [
+    "--cov",
+    "--cov-append"
+]
+testpaths = [
+    "tests"
+]
+
+[tool.coverage.run]
+branch = true
+source = [
+    "src/nwb_linkml"
+]
+omit = [
+    "*/nwb_schema_language/*",
+    "*/nwb_linkml/models/*"
+]
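With the `addopts` above, a bare `pytest` run in `nwb_linkml` is roughly equivalent to the following programmatic call; a sketch for illustration, assuming it is run from the package root:

```python
# Rough equivalent of running `pytest` with the addopts configured above:
# collect from ./tests, measure coverage per [tool.coverage.run], and
# append to any existing coverage data.
import sys
import pytest

sys.exit(pytest.main(["--cov", "--cov-append", "tests"]))
```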


@@ -1 +1 @@
-from nwb_linkml.src.nwb_linkml.maps import preload
+from nwb_linkml.maps import preload


@@ -1 +1,4 @@
-from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter
+from nwb_linkml.adapters.namespaces import NamespacesAdapter
+from nwb_linkml.adapters.classes import ClassAdapter
+from nwb_linkml.adapters.group import GroupAdapter
+from nwb_linkml.adapters.schema import SchemaAdapter


@@ -7,7 +7,7 @@ from typing import List, Optional
from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
-from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
+from nwb_linkml.maps import QUANTITY_MAP

CAMEL_TO_SNAKE = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
"""


@@ -11,7 +11,7 @@ from pydantic import PrivateAttr
from nwb_schema_language import Dataset, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake
from nwb_linkml.adapters.adapter import BuildResult
-from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
+from nwb_linkml.maps import QUANTITY_MAP

class DatasetAdapter(ClassAdapter):
    cls: Dataset


@@ -9,7 +9,7 @@ from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, D
from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.adapter import BuildResult
-from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
+from nwb_linkml.maps import QUANTITY_MAP

class GroupAdapter(ClassAdapter):
    cls: Group


@@ -0,0 +1 @@
from nwb_linkml.generators.pydantic import PydanticGenerator


@@ -25,7 +25,7 @@ from types import ModuleType
from copy import deepcopy
import warnings

-from nwb_linkml.src.nwb_linkml.maps import flat_to_npytyping
+from nwb_linkml.maps import flat_to_npytyping
from linkml.generators import PydanticGenerator
from linkml_runtime.linkml_model.meta import (
    Annotation,


@@ -0,0 +1 @@
from nwb_linkml.io import schema


@@ -1,6 +1,8 @@
"""
Define and manage NWB namespaces in external repositories
"""
+import pdb
+from typing import Optional
import warnings
from pathlib import Path
import tempfile
@@ -50,7 +52,7 @@ class GitRepo:
    def __init__(self, namespace:NamespaceRepo, commit:str|None=None):
        self._temp_directory = None
        self.namespace = namespace
-        self.commit = commit
+        self._commit = commit

    def _git_call(self, *args) -> subprocess.CompletedProcess:
        res = subprocess.run(
@@ -68,9 +70,7 @@
        """
        if self._temp_directory is None:
            self._temp_directory = Path(tempfile.gettempdir()) / f'nwb_linkml__{self.namespace.name}'
-            if self._temp_directory.exists():
-                warnings.warn(f'Temporary directory already exists! {self._temp_directory}')
-            else:
+            if not self._temp_directory.exists():
                self._temp_directory.mkdir(parents=True)

        return self._temp_directory
@@ -99,6 +99,23 @@
        """
        return self.temp_directory / self.namespace.path

+    @property
+    def commit(self) -> Optional[str]:
+        """
+        The intended commit to check out.
+
+        If ``None``, should be the latest commit when the repo was checked out
+
+        Should match :prop:`.active_commit`, differs semantically in that it is used to
+        set the active_commit, while :prop:`.active_commit` reads what commit is actually checked out
+        """
+        return self._commit
+
+    @commit.setter
+    def commit(self, commit:str):
+        self._git_call('checkout', commit)
+        self._commit = commit
+
    def check(self) -> bool:
        """
        Check if the repository is already cloned and checked out
@@ -120,7 +137,7 @@
            return False

        # Check that the remote matches
-        if self.remote.strip('.git') != self.namespace.repository:
+        if self.remote != str(self.namespace.repository):
            warnings.warn(f'Repository exists, but has the wrong remote URL.\nExpected: {self.namespace.repository}\nGot:{self.remote.strip(".git")}')
            return False
@@ -164,6 +181,8 @@
        self.cleanup()

        res = subprocess.run(['git', 'clone', str(self.namespace.repository), str(self.temp_directory)])
+        if self.commit:
+            self.commit = self.commit
        if res.returncode != 0:
            raise GitError(f'Could not clone repository:\n{res.stderr}')
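The `commit` property/setter added above turns pinning a checkout into an attribute assignment; a hedged usage sketch built only from names that appear in this diff (the SHA is the nwb-core pin used in the new tests):

```python
# Sketch of the new GitRepo commit handling; not part of the commit itself.
from nwb_linkml.io.git import GitRepo, NWB_CORE_REPO

repo = GitRepo(NWB_CORE_REPO)
repo.clone()                       # clone into the per-namespace temp directory
repo.commit = 'b4f8838cbfbb7f8a117bd7e0aad19133d26868b4'  # setter runs `git checkout`
assert repo.active_commit == repo.commit
repo.cleanup()                     # remove the temporary checkout again
```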


@@ -4,15 +4,14 @@ Loading/saving NWB Schema yaml files
from pathlib import Path
from typing import Optional
from pprint import pprint
-import warnings

from linkml_runtime.loaders import yaml_loader
import yaml

from nwb_schema_language import Namespaces, Group, Dataset
-from nwb_linkml.namespaces import NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
-from nwb_linkml.src.nwb_linkml.map import PHASES, Map
-from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter
+from nwb_linkml.io.git import NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
+from nwb_linkml.map import PHASES, Map
+from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter
@@ -32,12 +31,9 @@ def load_namespaces(path:Path|NamespaceRepo) -> Namespaces:
    ns_dict = load_yaml(path)
    namespaces = yaml_loader.load(ns_dict, target_class=Namespaces)
    return namespaces

def load_schema_file(path:Path, yaml:Optional[dict] = None) -> SchemaAdapter:
    if yaml is not None:
        source = yaml
@@ -89,7 +85,7 @@ def load_namespace_schema(namespace: Namespaces, path:Path=Path('..')) -> Namesp
    for ns in namespace.namespaces:
        for schema in ns.schema_:
            if schema.source is None:
-                warnings.warn(f"No source specified for {schema}")
+                # this is normal, we'll resolve later
                continue
            yml_file = (path / schema.source).resolve()
            sch.append(load_schema_file(yml_file))


@@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import \
    TypeDefinition,\
    Prefix,\
    PermissibleValue
-from nwb_linkml.src.nwb_linkml.maps import flat_to_linkml
+from nwb_linkml.maps import flat_to_linkml

FlatDType = EnumDefinition(
@@ -20,34 +20,6 @@ FlatDType = EnumDefinition(
    permissible_values=[PermissibleValue(p) for p in FlatDtype_source.__members__.keys()],
)

-# DimNameSlot = SlotDefinition(
-# name="dim_name",
-# range="string",
-# description="The name of a dimension"
-# )
-# DimShapeSlot = SlotDefinition(
-# name="dim_shape",
-# range="integer",
-# required=False
-# )
-# DimClass = ClassDefinition(
-# name="Dimension",
-# slots=[DimNameSlot.name, DimShapeSlot.name],
-# description="A single dimension within a shape"
-# )
-# DimSlot = SlotDefinition(
-# name="dim",
-# range=DimClass.name,
-# multivalued=True,
-# description="Slot representing the dimensions that a Shape can have"
-# )
-# ShapeClass = ClassDefinition(
-# name="Shape",
-# description="A possible shape for an array-like dataset",
-# slots=[DimSlot.name]
-# )

DTypeTypes = []
for nwbtype, linkmltype in flat_to_linkml.items():
    amin = None
@@ -85,7 +57,6 @@ NwbLangSchema = SchemaDefinition(
    id='nwb.language',
    description="Adapter objects to mimic the behavior of elements in the nwb-schema-language",
    enums=[FlatDType],
-    # slots=[DimNameSlot, DimShapeSlot, DimSlot],
    classes=[Arraylike, AnyType],
    types=DTypeTypes,
    imports=['linkml:types'],
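For orientation, the loop above builds one LinkML type per flat NWB dtype from `flat_to_linkml`; stripped of the `amin`/minimum-value handling, the pattern is roughly the following simplified sketch (not the file's exact code):

```python
# Simplified sketch of the DTypeTypes construction: one TypeDefinition per
# flat NWB dtype, named after the NWB dtype and typed as its LinkML base.
from linkml_runtime.linkml_model import TypeDefinition
from nwb_linkml.maps import flat_to_linkml

DTypeTypes = [
    TypeDefinition(name=nwbtype, typeof=linkmltype)
    for nwbtype, linkmltype in flat_to_linkml.items()
]
```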


@@ -1,3 +1,4 @@
# Import everything so it's defined, but shoudlnt' necessarily be used from here
-from nwb_linkml.src.nwb_linkml.maps import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
-from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
+from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
+from nwb_linkml.maps.quantity import QUANTITY_MAP
+from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_npytyping


@@ -2,7 +2,7 @@
Maps to change the loaded .yaml from nwb schema before it's given to the nwb_schema_language models
"""
-from nwb_linkml.src.nwb_linkml.map import KeyMap, SCOPE_TYPES, PHASES
+from nwb_linkml.map import KeyMap, SCOPE_TYPES, PHASES

MAP_HDMF_DATATYPE_DEF = KeyMap(
    source="\'data_type_def\'",
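These KeyMaps rename hdmf-style keys such as `data_type_def` to the `neurodata_type_*` names the nwb-schema-language models expect (exercised by the new test_maps.py below). A generic, plain-Python illustration of that kind of recursive key rename, not the actual KeyMap implementation:

```python
# Generic illustration only: recursively rename dict keys in loaded YAML,
# analogous in spirit to the KeyMaps above but not nwb_linkml's implementation.
def rename_keys(obj, mapping):
    if isinstance(obj, dict):
        return {mapping.get(k, k): rename_keys(v, mapping) for k, v in obj.items()}
    if isinstance(obj, list):
        return [rename_keys(v, mapping) for v in obj]
    return obj

renamed = rename_keys(
    {'groups': [{'data_type_def': 'Container'}]},
    {'data_type_def': 'neurodata_type_def', 'data_type_inc': 'neurodata_type_inc'},
)
assert renamed == {'groups': [{'neurodata_type_def': 'Container'}]}
```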


@@ -10,7 +10,7 @@ import dash_cytoscape as cyto
cyto.load_extra_layouts()

from nwb_schema_language import Namespace, Group, Dataset
-from nwb_linkml.src.nwb_linkml.io import load_nwb_core
+from nwb_linkml.io import load_nwb_core

if TYPE_CHECKING:
    from nwb_linkml.adapters import NamespacesAdapter


@@ -12,9 +12,9 @@ import h5py
from linkml_runtime.dumpers import yaml_dumper

from nwb_schema_language import Namespaces
-from nwb_linkml.src.nwb_linkml.io import load_schema_file
+from nwb_linkml.io.schema import load_schema_file
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
-from nwb_linkml.src.nwb_linkml.map import apply_preload
+from nwb_linkml.map import apply_preload
from nwb_linkml.adapters import SchemaAdapter, NamespacesAdapter

def make_namespace_adapter(schema: dict) -> NamespacesAdapter:


@@ -1,7 +1,7 @@
import pytest

-from nwb_linkml.src.nwb_linkml import io
-from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter
+from nwb_linkml.io import schema as io
+from nwb_linkml.adapters.namespaces import NamespacesAdapter

import shutil
from pathlib import Path


@@ -1,5 +1,5 @@
import pytest

-from .fixtures import nwb_core_fixture
+from ..fixtures import nwb_core_fixture

from nwb_schema_language import Dataset, Group, Schema
@@ -20,3 +20,9 @@ def test_walk_types(nwb_core_fixture, walk_class, known_number):
    # pdb.set_trace()

+
+def test_build_result_add():
+    """
+    Build results can be added together
+    """


@@ -1,4 +1,4 @@
import pytest

-from .fixtures import nwb_core_fixture
+from ..fixtures import nwb_core_fixture


@@ -1,5 +1,7 @@
import pytest

-from .fixtures import nwb_core_fixture
+from ..fixtures import nwb_core_fixture
+from nwb_schema_language import Dataset, Group
+from nwb_linkml.adapters import SchemaAdapter

@pytest.mark.parametrize(
    ['class_name','schema_file','namespace_name'],
@@ -17,4 +19,17 @@ def test_find_type_source(nwb_core_fixture, class_name, schema_file, namespace_n

def test_populate_imports(nwb_core_fixture):
    nwb_core_fixture.populate_imports()
+    schema: SchemaAdapter
+    assert len(nwb_core_fixture.schemas) > 0
+    for schema in nwb_core_fixture.schemas:
+        need_imports = [nwb_core_fixture.find_type_source(cls.neurodata_type_def).namespace for cls in schema.created_classes if cls.neurodata_type_inc is not None]
+        need_imports = [i for i in need_imports if i != schema.namespace]
+
+        for i in need_imports:
+            assert i in schema.imports
+
+
+def test_build(nwb_core_fixture):
+    pass


@@ -1,5 +1,5 @@
import pytest

-from .fixtures import nwb_core_fixture
+from ..fixtures import nwb_core_fixture

from nwb_schema_language import Dataset, Group, Schema


@@ -5,7 +5,6 @@ import warnings
from .fixtures import nwb_core_fixture, tmp_output_dir

from linkml_runtime.dumpers import yaml_dumper
-from linkml.generators import PydanticGenerator
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.lang_elements import NwbLangSchema


@@ -0,0 +1,94 @@
import pytest
import tempfile
import shutil
import yaml

from nwb_linkml.io.git import GitRepo, GitError, NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
from nwb_schema_language import Namespaces


@pytest.mark.parametrize(
    ['source', 'commit'],
    [
        (NWB_CORE_REPO, 'b4f8838cbfbb7f8a117bd7e0aad19133d26868b4'),
        (HDMF_COMMON_REPO, '660b6ac0780dd9d2cb1e56fea8b62c671ca5e2c8')
    ]
)
def test_gitrepo(source, commit):
    """
    Basic functionality of GitRepo
    """
    repo = GitRepo(source)

    # make a temp directory that exists
    first_dir = repo.temp_directory
    assert repo.temp_directory.exists()

    # find the same repository when it's deleted
    shutil.rmtree(str(repo.temp_directory))
    repo._temp_directory = None
    second_dir = repo.temp_directory
    assert first_dir == second_dir

    # successfully clone the repository after its deleted
    assert not any(repo.temp_directory.iterdir())
    repo.clone()

    # check that the namespace file exists and has some expected fields
    assert repo.namespace_file.exists()
    with open(repo.namespace_file, 'r') as nsfile:
        ns = yaml.safe_load(nsfile)
    # correct model instantiation confirms the repo was cloned successfully
    ns_model = Namespaces(**ns)

    # setting commit should change the active commit
    prior_commit = repo.active_commit
    repo.commit = commit
    assert prior_commit != repo.active_commit
    assert repo.active_commit == commit
    assert repo.commit == commit

    # remote is gotten correctly
    assert repo.remote == str(source.repository)

    # cleanup should remove files
    repo.cleanup()
    assert not any(repo.temp_directory.iterdir())


@pytest.mark.parametrize(
    ['source', 'commit'],
    [
        (NWB_CORE_REPO, 'b4f8838cbfbb7f8a117bd7e0aad19133d26868b4')
    ]
)
def test_gitrepo_check(source, commit):
    """
    Our check method should flag common problems with the repo
    """
    repo = GitRepo(NWB_CORE_REPO, commit=commit)
    # cleanup is tested separately
    repo.cleanup()

    # check should fail without warning when the repo is empty
    assert not repo.check()
    repo.clone()
    assert repo.check()

    # check should fail when repo is at wrong commit
    assert repo.active_commit == commit
    repo._git_call('checkout', 'HEAD~10')
    with pytest.warns(UserWarning, match=".*wrong commit.*"):
        assert not repo.check()
    repo.commit = commit
    assert repo.active_commit == commit
    assert repo.check()

    # check should fail on repo namespace mismatch
    old_repo = repo.namespace.repository
    repo.namespace.repository = "https://example.com/a/git/repository"
    with pytest.warns(UserWarning, match='.*wrong remote.*'):
        assert not repo.check()
    repo.namespace.repository = old_repo
    assert repo.check()


@@ -0,0 +1,42 @@
import pytest
import shutil
import tempfile
import yaml
from yaml import CDumper as Dumper
from pathlib import Path

from nwb_linkml.io.schema import load_yaml


def test_preload_maps():
    hdmf_style_naming = {
        'groups': [
            {
                'data_type_def': 'Container',
                'data_type_inc': 'MainClass',
                'doc': 'Demo group',
                'datasets': [
                    {
                        'data_type_inc': 'Data'
                    }
                ]
            }
        ]
    }

    temp, temp_name = tempfile.mkstemp(suffix='.yaml')
    with open(temp_name, 'w') as temp_f:
        yaml.dump(hdmf_style_naming, temp_f, Dumper=Dumper)

    loaded = load_yaml(Path(temp_name))

    assert 'neurodata_type_def' in loaded['groups'][0].keys()
    assert 'data_type_def' not in loaded['groups'][0].keys()
    assert 'neurodata_type_inc' in loaded['groups'][0].keys()
    assert 'data_type_inc' not in loaded['groups'][0].keys()

    assert 'neurodata_type_inc' in loaded['groups'][0]['datasets'][0].keys()
    assert 'data_type_inc' not in loaded['groups'][0]['datasets'][0].keys()

    shutil.rmtree(temp_name)


@@ -1,4 +0,0 @@
-[pytest]
-testpaths =
-    nwb_linkml/tests