refactor nwb_linkml to src layout

This commit is contained in:
sneakers-the-rat 2023-09-05 19:25:20 -07:00
parent 9306f7e688
commit ccc09de400
83 changed files with 95 additions and 45 deletions

View file

@ -21,8 +21,10 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: pip install .[tests] pytest-md pytest-emoji run: pip install .[tests] pytest-md pytest-emoji
working-directory: nwb_linkml
- uses: pavelzw/pytest-action@v2 - uses: pavelzw/pytest-action@v2
working-directory: nwb_linkml
with: with:
emoji: true emoji: true
verbose: true verbose: true

View file

@ -0,0 +1,2 @@

View file

@ -1 +0,0 @@
from nwb_linkml.maps import preload

View file

@ -1,4 +0,0 @@
from nwb_linkml.adapters.adapter import Adapter
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.adapters.classes import ClassAdapter

View file

@ -1,3 +0,0 @@
# Import everything so it's defined, but shouldn't necessarily be used from here
from nwb_linkml.maps.preload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP

View file

@ -6,8 +6,8 @@ authors = ["sneakers-the-rat <JLSaunders987@gmail.com>"]
license = "GPL-3.0" license = "GPL-3.0"
readme = "README.md" readme = "README.md"
packages = [ packages = [
{include = "nwb_linkml"}, {include = "nwb_linkml", from="src"},
{include = "nwb_schema_language", from="nwb_schema_language/src"} {include = "nwb_schema_language", from="../nwb_schema_language/src"}
] ]
[tool.poetry.dependencies] [tool.poetry.dependencies]

View file

@ -0,0 +1 @@
from nwb_linkml.src.nwb_linkml.maps import preload

View file

@ -0,0 +1 @@
from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter

View file

@ -1,15 +1,13 @@
""" """
Adapters to linkML classes Adapters to linkML classes
""" """
import pdb
import re import re
from abc import abstractmethod from abc import abstractmethod
from typing import List, Optional from typing import List, Optional
from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.adapter import Adapter, BuildResult
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from nwb_linkml.maps import QUANTITY_MAP from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.lang_elements import Arraylike
CAMEL_TO_SNAKE = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))') CAMEL_TO_SNAKE = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
""" """

View file

@ -11,7 +11,7 @@ from pydantic import PrivateAttr
from nwb_schema_language import Dataset, ReferenceDtype, CompoundDtype, DTypeType from nwb_schema_language import Dataset, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake
from nwb_linkml.adapters.adapter import BuildResult from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.maps import QUANTITY_MAP from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
class DatasetAdapter(ClassAdapter): class DatasetAdapter(ClassAdapter):
cls: Dataset cls: Dataset

View file

@ -9,7 +9,7 @@ from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, D
from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake from nwb_linkml.adapters.classes import ClassAdapter, camel_to_snake
from nwb_linkml.adapters.dataset import DatasetAdapter from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.adapter import BuildResult from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.maps import QUANTITY_MAP from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP
class GroupAdapter(ClassAdapter): class GroupAdapter(ClassAdapter):
cls: Group cls: Group

View file

@ -2,7 +2,6 @@
Since NWB doesn't necessarily have a term for a single nwb schema file, we're going Since NWB doesn't necessarily have a term for a single nwb schema file, we're going
to call them "schema" objects to call them "schema" objects
""" """
import pdb
from typing import Optional, List, TYPE_CHECKING, Type from typing import Optional, List, TYPE_CHECKING, Type
from pathlib import Path from pathlib import Path
from pydantic import Field, PrivateAttr from pydantic import Field, PrivateAttr
@ -11,7 +10,7 @@ from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.dataset import DatasetAdapter from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.group import GroupAdapter from nwb_linkml.adapters.group import GroupAdapter
if TYPE_CHECKING: if TYPE_CHECKING:
from nwb_linkml.adapters.namespaces import NamespacesAdapter pass
from nwb_schema_language import Group, Dataset from nwb_schema_language import Group, Dataset
from typing import NamedTuple from typing import NamedTuple

View file

@ -25,7 +25,7 @@ from types import ModuleType
from copy import deepcopy from copy import deepcopy
import warnings import warnings
from nwb_linkml.maps.dtype import flat_to_npytyping from nwb_linkml.src.nwb_linkml.maps import flat_to_npytyping
from linkml.generators import PydanticGenerator from linkml.generators import PydanticGenerator
from linkml_runtime.linkml_model.meta import ( from linkml_runtime.linkml_model.meta import (
Annotation, Annotation,

View file

@ -0,0 +1,61 @@
"""
This is a sandbox file that should be split out to its own pydantic-hdf5 package, but just experimenting here to get our bearings
"""
from typing import Optional, List, Dict
from pathlib import Path
from types import ModuleType
import h5py
from nwb_linkml.translate import generate_from_nwbfile
class HDF5IO():
    """
    Read an NWB HDF5 file into plain python dicts/lists, using pydantic model
    modules that are generated lazily from the file's embedded schema.

    NOTE(review): sandbox-stage code (per the module docstring) — intended to be
    split into its own pydantic-hdf5 package.
    """

    def __init__(self, path: Path):
        """
        Args:
            path: location of the ``.nwb`` (HDF5) file to read.
        """
        self.path = Path(path)
        # cache for generated pydantic modules, filled on first `modules` access
        self._modules: Dict[str, ModuleType] = {}

    @property
    def modules(self) -> Dict[str, ModuleType]:
        """Pydantic modules generated from the file's schema, built once and cached."""
        if not self._modules:
            self._modules = generate_from_nwbfile(self.path)
        return self._modules

    def process_group(self, group: h5py.Group | h5py.File) -> dict | list:
        """
        Recursively convert an HDF5 group into python containers.

        A group with no ``neurodata_type`` attribute whose children are all
        groups that each carry a ``neurodata_type`` is treated as list-like;
        anything else is treated as dict-like, keyed by member name.
        """
        attributes = dict(group.attrs)

        # --------------------------------------------------
        # list-like: a list of data classes
        # --------------------------------------------------
        if 'neurodata_type' not in attributes:
            members = list(group.values())
            if all(isinstance(m, h5py.Group) for m in members) and \
                    all('neurodata_type' in m.attrs for m in members):
                return [self.process_group(m) for m in members]

        # --------------------------------------------------
        # dict-like
        # --------------------------------------------------
        out = {}
        for name, member in group.items():
            if isinstance(member, h5py.Group):
                out[name] = self.process_group(member)
            elif isinstance(member, h5py.Dataset):
                out[name] = self.process_dataset(member)
        return out

    def process_dataset(self, data: h5py.Dataset) -> dict | list:
        """Convert a dataset to a python list; only 1-D datasets are handled so far."""
        if len(data.shape) == 1:
            return list(data[:])
        # higher-rank datasets fall through (returns None) — not implemented yet


if __name__ == "__main__":
    NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
    reader = HDF5IO(NWBFILE)

View file

@ -1,9 +1,8 @@
""" """
Loading/saving NWB Schema yaml files Loading/saving NWB Schema yaml files
""" """
import pdb
from pathlib import Path from pathlib import Path
from typing import TypedDict, List, Dict, Optional from typing import Optional
from pprint import pprint from pprint import pprint
import warnings import warnings
@ -12,9 +11,8 @@ import yaml
from nwb_schema_language import Namespaces, Group, Dataset from nwb_schema_language import Namespaces, Group, Dataset
from nwb_linkml.namespaces import NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO from nwb_linkml.namespaces import NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
from nwb_linkml.maps import preload from nwb_linkml.src.nwb_linkml.map import PHASES, Map
from nwb_linkml.map import PHASES, Map from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter from nwb_linkml.adapters.schema import SchemaAdapter
@ -70,7 +68,7 @@ def load_schema_file(path:Path, yaml:Optional[dict] = None) -> SchemaAdapter:
) )
return schema return schema
def load_namespace_schema(namespace: Namespaces, path:Path=Path('.')) -> NamespacesAdapter: def load_namespace_schema(namespace: Namespaces, path:Path=Path('..')) -> NamespacesAdapter:
""" """
Load all schema referenced by a namespace file Load all schema referenced by a namespace file

View file

@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import \
TypeDefinition,\ TypeDefinition,\
Prefix,\ Prefix,\
PermissibleValue PermissibleValue
from nwb_linkml.maps.dtype import flat_to_linkml from nwb_linkml.src.nwb_linkml.maps import flat_to_linkml
FlatDType = EnumDefinition( FlatDType = EnumDefinition(

View file

@ -66,7 +66,6 @@ class KeyMap(Map):
def apply_preload(ns_dict) -> dict: def apply_preload(ns_dict) -> dict:
from nwb_linkml.maps import preload
maps = [m for m in Map.instances if m.phase == PHASES.postload] maps = [m for m in Map.instances if m.phase == PHASES.postload]
for amap in maps: for amap in maps:
ns_dict = amap.apply(ns_dict) ns_dict = amap.apply(ns_dict)

View file

@ -0,0 +1,3 @@
# Import everything so it's defined, but shouldn't necessarily be used from here
from nwb_linkml.src.nwb_linkml.maps import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.src.nwb_linkml.maps import QUANTITY_MAP

View file

@ -2,7 +2,7 @@
Maps to change the loaded .yaml from nwb schema before it's given to the nwb_schema_language models Maps to change the loaded .yaml from nwb schema before it's given to the nwb_schema_language models
""" """
from nwb_linkml.map import KeyMap, SCOPE_TYPES, PHASES from nwb_linkml.src.nwb_linkml.map import KeyMap, SCOPE_TYPES, PHASES
MAP_HDMF_DATATYPE_DEF = KeyMap( MAP_HDMF_DATATYPE_DEF = KeyMap(
source="\'data_type_def\'", source="\'data_type_def\'",

View file

@ -1,7 +1,7 @@
""" """
Various visualization routines, mostly to help development for now Various visualization routines, mostly to help development for now
""" """
from typing import TYPE_CHECKING, Optional, List, TypedDict, Union from typing import TYPE_CHECKING, Optional, List, TypedDict
from rich import print from rich import print
import random import random
@ -10,7 +10,7 @@ import dash_cytoscape as cyto
cyto.load_extra_layouts() cyto.load_extra_layouts()
from nwb_schema_language import Namespace, Group, Dataset from nwb_schema_language import Namespace, Group, Dataset
from nwb_linkml.io import load_nwb_core from nwb_linkml.src.nwb_linkml.io import load_nwb_core
if TYPE_CHECKING: if TYPE_CHECKING:
from nwb_linkml.adapters import NamespacesAdapter from nwb_linkml.adapters import NamespacesAdapter

View file

@ -1,7 +1,6 @@
""" """
Convenience functions for translating NWB schema Convenience functions for translating NWB schema
""" """
import pdb
import tempfile import tempfile
from typing import List, Optional, Dict from typing import List, Optional, Dict
from types import ModuleType from types import ModuleType
@ -10,17 +9,12 @@ import json
import h5py import h5py
from linkml_runtime import SchemaView
from linkml_runtime.linkml_model import SchemaDefinition
from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.utils.compile_python import compile_python
from nwb_schema_language import Namespaces from nwb_schema_language import Namespaces
from nwb_linkml.io import load_schema_file from nwb_linkml.src.nwb_linkml.io import load_schema_file
from nwb_linkml.generators.pydantic import NWBPydanticGenerator from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.map import apply_preload from nwb_linkml.src.nwb_linkml.map import apply_preload
from nwb_linkml.adapters import SchemaAdapter, NamespacesAdapter from nwb_linkml.adapters import SchemaAdapter, NamespacesAdapter
def make_namespace_adapter(schema: dict) -> NamespacesAdapter: def make_namespace_adapter(schema: dict) -> NamespacesAdapter:

View file

View file

@ -1,9 +1,7 @@
import pytest import pytest
from typing import Dict
from nwb_linkml.src.nwb_linkml import io
from nwb_linkml import io from nwb_linkml.src.nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.namespaces import NamespacesAdapter
import shutil import shutil
from pathlib import Path from pathlib import Path

View file

@ -13,3 +13,4 @@ def test_generate_pydantic():
#pydantic_module = generate_from_nwbfile(NWBFILE) #pydantic_module = generate_from_nwbfile(NWBFILE)
#pdb.set_trace() #pdb.set_trace()

View file

@ -1,4 +1,4 @@
[pytest] [pytest]
testpaths = testpaths =
tests nwb_linkml/tests

View file

@ -3,7 +3,8 @@ from pathlib import Path
from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.dumpers import yaml_dumper
from nwb_linkml.generators.pydantic import NWBPydanticGenerator from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml import io from nwb_linkml.src.nwb_linkml import io
def generate_core_yaml(output_path:Path): def generate_core_yaml(output_path:Path):
core = io.load_nwb_core() core = io.load_nwb_core()
@ -34,13 +35,13 @@ def parser() -> ArgumentParser:
'--yaml', '--yaml',
help="directory to export linkML schema to", help="directory to export linkML schema to",
type=Path, type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'schema' default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'schema'
) )
parser.add_argument( parser.add_argument(
'--pydantic', '--pydantic',
help="directory to export pydantic models", help="directory to export pydantic models",
type=Path, type=Path,
default=Path(__file__).parent.parent / 'nwb_linkml' / 'models' default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'models'
) )
return parser return parser