diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml
index 8453772..85337e9 100644
--- a/nwb_linkml/pyproject.toml
+++ b/nwb_linkml/pyproject.toml
@@ -5,11 +5,15 @@
 description = "Translating NWB schema language to LinkML"
 authors = ["sneakers-the-rat "]
 license = "GPL-3.0"
 readme = "README.md"
+homepage = "https://nwb-linkml.readthedocs.io"
+repository = "https://github.com/p2p-ld/nwb-linkml/"
+documentation = "https://nwb-linkml.readthedocs.io"
 packages = [
     {include = "nwb_linkml", from="src"},
     #{include = "nwb_schema_language", from="../nwb_schema_language/src"}
 ]
+
 [tool.poetry.dependencies]
 python = ">=3.11,<3.13"
 pyyaml = "^6.0"
diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
index 6d94937..7c10f60 100644
--- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py
+++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py
@@ -8,6 +8,7 @@ from dataclasses import dataclass, field
 from typing import List, Dict, Type, Generator, Any, Tuple, Optional, TypeVar, TypeVarTuple, Unpack, Literal
 from pydantic import BaseModel, Field, validator
 from linkml_runtime.linkml_model import Element, SchemaDefinition, ClassDefinition, SlotDefinition, TypeDefinition
+from nwb_schema_language import Dataset, Attribute, Schema, Group
 
 # SchemaDefClass = dataclass(SchemaDefinition).__pydantic_model__
 
@@ -60,7 +61,7 @@ class BuildResult:
         return out_str
 
 
-T = TypeVar
+T = TypeVar('T', Dataset, Attribute, Schema, Group)
 Ts = TypeVarTuple('Ts')
 
 class Adapter(BaseModel):
@@ -148,7 +149,7 @@ class Adapter(BaseModel):
 
 
-    def walk_types(self, input: BaseModel | list | dict, get_type: T | List[Unpack[Ts]] | Tuple[Unpack[T]]) -> Generator[T, None, None]:
+    def walk_types(self, input: BaseModel | list | dict, get_type: Type[T] | Tuple[Type[T], Type[Unpack[Ts]]]) -> Generator[T | Ts, None, None]:
         if not isinstance(get_type, (list, tuple)):
             get_type = [get_type]
 
 
diff --git a/nwb_linkml/tests/test_adapters/test_adapter.py b/nwb_linkml/tests/test_adapters/test_adapter.py
index d13c7c6..d5e6307 100644
--- a/nwb_linkml/tests/test_adapters/test_adapter.py
+++ b/nwb_linkml/tests/test_adapters/test_adapter.py
@@ -1,10 +1,11 @@
 import pdb
+import numpy as np
 
 import pytest
 
 from ..fixtures import nwb_core_fixture
 
 from linkml_runtime.linkml_model import SchemaDefinition, ClassDefinition, SlotDefinition, TypeDefinition
-from nwb_schema_language import Dataset, Group, Schema, CompoundDtype
+from nwb_schema_language import Dataset, Group, Schema, CompoundDtype, Attribute
 
 from nwb_linkml.adapters import BuildResult
@@ -32,13 +33,22 @@ def test_walk_types(nwb_core_fixture, walk_class, known_number):
     assert len(class_list) == known_number
 
 def test_walk_fields(nwb_core_fixture):
-    dtype = nwb_core_fixture.walk_fields(nwb_core_fixture, 'dtype')
+    # should get same number of dtype fields as there are datasets and attributes + compound dtypes
+    dtype = list(nwb_core_fixture.walk_fields(nwb_core_fixture, 'dtype'))
+
+    dtype_havers = list(nwb_core_fixture.walk_types(nwb_core_fixture, (Dataset, Attribute)))
+    compound_dtypes = [len(d.dtype) for d in dtype_havers if isinstance(d.dtype, list)]
+    expected_dtypes = np.sum(compound_dtypes) + len(dtype_havers)
+    assert expected_dtypes == len(dtype)
 
 
 def test_walk_field_values(nwb_core_fixture):
     dtype_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, 'dtype', value=None))
-
-    compounds = [d for d in dtype_models if isinstance(d.dtype, list) and len(d.dtype) > 0 and isinstance(d.dtype[0], CompoundDtype)]
+    assert all([hasattr(d, 'dtype') for d in dtype_models])
+    text_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, 'dtype', value='text'))
+    assert all([d.dtype == 'text' for d in text_models])
+    # 135 known value from regex search
+    assert len(text_models) == len([d for d in dtype_models if d.dtype == 'text']) == 135
 
 
 def test_build_result(linkml_schema_bare):
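For context (not part of the patch itself): the `adapter.py` change above replaces a bare `T = TypeVar` with a `TypeVar` constrained to the `nwb_schema_language` models, and lets `walk_types` accept either a single class or a tuple of classes, which is how the updated test calls it with `(Dataset, Attribute)`. The following is a minimal, self-contained sketch of that pattern only; the `Dataset`/`Attribute` classes here are simplified stand-ins for the real models, and the function just filters a flat list rather than recursively walking nested models, lists, and dicts the way `Adapter.walk_types` does.

```python
from typing import Generator, Tuple, Type, TypeVar

from pydantic import BaseModel


# Simplified stand-ins for the nwb_schema_language models;
# the real classes carry many more fields (dtype, doc, ...).
class Dataset(BaseModel):
    name: str


class Attribute(BaseModel):
    name: str


# Constrained TypeVar, mirroring the patch's
# T = TypeVar('T', Dataset, Attribute, Schema, Group)
T = TypeVar("T", Dataset, Attribute)


def walk_types(
    items: list[BaseModel],
    get_type: Type[T] | Tuple[Type[T], ...],
) -> Generator[T, None, None]:
    """Yield every item that is an instance of the requested type(s)."""
    if not isinstance(get_type, tuple):
        get_type = (get_type,)
    for item in items:
        if isinstance(item, get_type):
            yield item


models = [Dataset(name="timestamps"), Attribute(name="unit")]

datasets = list(walk_types(models, Dataset))            # a single class...
both = list(walk_types(models, (Dataset, Attribute)))   # ...or a tuple, as in the test
assert len(datasets) == 1 and len(both) == 2
```

Constraining the TypeVar means a caller that passes `Dataset` gets a generator typed as yielding `Dataset` instances rather than plain `BaseModel`, which is what the new `test_walk_fields` relies on when it counts dtype-bearing objects.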