This commit is contained in:
sneakers-the-rat 2024-07-09 03:32:37 -07:00
parent f4d397cde1
commit b6af8c9718
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
8 changed files with 31 additions and 23 deletions

View file

@@ -1,11 +1,17 @@
"""
Test fixtures primarily for doctests for adapters
"""
import re
import textwrap
from doctest import NORMALIZE_WHITESPACE, ELLIPSIS
from sybil import Document
from sybil import Sybil, Region
from doctest import ELLIPSIS, NORMALIZE_WHITESPACE
from typing import Generator
import yaml
from sybil import Document, Example, Region, Sybil
from sybil.parsers.codeblock import PythonCodeBlockParser
from sybil.parsers.doctest import DocTestParser
import yaml
from nwb_linkml import adapters
# Test adapter generation examples
@@ -24,7 +30,7 @@ def _strip_nwb(nwb: str) -> str:
return nwb
def test_adapter_block(example):
def test_adapter_block(example: Example) -> None:
"""
The linkml generated from a nwb example input should match
that provided in the docstring.
@@ -44,10 +50,13 @@ def test_adapter_block(example):
assert generated == expected
def parse_adapter_blocks(document: Document):
def parse_adapter_blocks(document: Document) -> Generator[Region, None, None]:
"""
Parse blocks with adapter directives, yield to test with :func:`.test_adapter_block`
"""
for start_match, end_match, source in document.find_region_sources(ADAPTER_START, ADAPTER_END):
# parse
sections = re.split(r":\w+?:", source, re.MULTILINE)
sections = re.split(r":\w+?:", source, flags=re.MULTILINE)
sections = [textwrap.dedent(section).strip() for section in sections]
sections[1] = _strip_nwb(sections[1])
@@ -56,9 +65,7 @@ def parse_adapter_blocks(document: Document):
adapter_parser = Sybil(
parsers=[
parse_adapter_blocks
],
parsers=[parse_adapter_blocks],
patterns=["adapters/*.py"],
)

View file

@@ -16,6 +16,7 @@ from typing import (
Union,
)
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import (
ClassDefinition,
Definition,
@@ -23,7 +24,6 @@ from linkml_runtime.linkml_model import (
SlotDefinition,
TypeDefinition,
)
from linkml_runtime.dumpers import yaml_dumper
from pydantic import BaseModel
from nwb_schema_language import Attribute, Dataset, Group, Schema

View file

@@ -3,16 +3,15 @@ Adapters to linkML classes
"""
from abc import abstractmethod
from typing import Type, TypeVar, List, Optional
from typing import List, Optional, Type, TypeVar
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from pydantic import field_validator
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import CompoundDtype, Dataset, DTypeType, Group, ReferenceDtype, FlatDtype
from nwb_schema_language import CompoundDtype, Dataset, DTypeType, FlatDtype, Group, ReferenceDtype
T = TypeVar("T", bound=Type[Dataset] | Type[Group])
TI = TypeVar("TI", bound=Dataset | Group)

View file

@@ -1,6 +1,7 @@
"""
Adapter for NWB datasets to linkml Classes
"""
from abc import abstractmethod
from typing import ClassVar, Optional, Type
@@ -14,7 +15,7 @@ from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
from nwb_linkml.maps.dtype import flat_to_linkml
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import Dataset, CompoundDtype
from nwb_schema_language import CompoundDtype, Dataset
class DatasetMap(Map):
@@ -141,9 +142,9 @@ class MapScalarAttributes(DatasetMap):
:linkml:
classes:
- name: starting_time
description: Timestamp of the first sample in seconds. When timestamps are uniformly
spaced, the timestamp of the first sample can be specified and all subsequent
ones calculated from the sampling rate attribute.
description: Timestamp of the first sample in seconds. When timestamps are
uniformly spaced, the timestamp of the first sample can be specified and all
subsequent ones calculated from the sampling rate attribute.
attributes:
name:
name: name
@@ -328,8 +329,8 @@ class MapArraylike(DatasetMap):
- null
- null
- null
doc: Binary data representing images across frames. If data are stored in an external
file, this should be an empty 3D array.
doc: Binary data representing images across frames. If data are stored in an
external file, this should be an empty 3D array.
:linkml:
slots:
- name: data
@@ -754,6 +755,7 @@ def is_1d(cls: Dataset) -> bool:
def is_compound(cls: Dataset) -> bool:
"""Check if dataset has a compound dtype"""
return (
isinstance(cls.dtype, list)
and len(cls.dtype) > 0

View file

@@ -36,7 +36,7 @@ def tmp_output_dir() -> Path:
path = Path(__file__).parent.resolve() / "__tmp__"
if path.exists():
for subdir in path.iterdir():
if subdir.name == 'git':
if subdir.name == "git":
# don't wipe out git repos every time, they don't rly change
continue
elif subdir.is_file() and subdir.parent != path:

View file

@@ -1,6 +1,6 @@
import pytest
from nwb_linkml.adapters.dataset import MapScalar
from nwb_linkml.adapters.dataset import MapScalar
from nwb_schema_language import Dataset

View file

@@ -265,4 +265,3 @@ class Dataset(DtypeMixin):
dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
default_factory=list
)

View file

@ -24,6 +24,7 @@ target-version = "py311"
include = ["nwb_linkml/**/*.py", "nwb_schema_language/src/**/*.py", "pyproject.toml"]
exclude = [
"docs",
"nwb_linkml/src/nwb_linkml/models/**/*.py",
"nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_language.py",
"nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py",
"tests/__tmp__/**/*"