ruff automatic fixes

sneakers-the-rat 2024-07-01 21:44:35 -07:00
parent 79fc7f23ad
commit 07aa879bb9
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
52 changed files with 229 additions and 331 deletions

View file

@@ -1,9 +1,7 @@
import os
import pytest
from doctest import ELLIPSIS, NORMALIZE_WHITESPACE
from tests.fixtures import tmp_output_dir
import pytest
from sybil import Sybil
from sybil.parsers.rest import DocTestParser, PythonCodeBlockParser
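Most of the hunks in this commit are ruff's import sorting (rule I001, its isort implementation): plain "import x" statements sort before "from x import y" statements, alphabetized within each section, with stdlib, third-party, and first-party sections kept separate. A minimal stdlib-only sketch, assuming ruff's default isort settings rather than this repo's actual configuration:

# Before `ruff check --fix` (flagged I001, unsorted imports):
# import sys
# from pathlib import Path
# import json

# After: straight imports first, then from-imports, each alphabetized
import json
import sys
from pathlib import Path

print(json.dumps({"argv": sys.argv[1:], "cwd": str(Path.cwd())}))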

View file

@@ -1,6 +1,6 @@
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.adapters.group import GroupAdapter
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.group import GroupAdapter
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter

View file

@@ -2,32 +2,29 @@
Base class for adapters
"""
import pdb
from abc import abstractmethod
import warnings
from dataclasses import dataclass, field
from typing import (
List,
Dict,
Type,
Generator,
Any,
Tuple,
Generator,
List,
Optional,
Tuple,
Type,
TypeVar,
TypeVarTuple,
Unpack,
Literal,
)
from pydantic import BaseModel, Field, validator
from linkml_runtime.linkml_model import (
Element,
SchemaDefinition,
ClassDefinition,
SchemaDefinition,
SlotDefinition,
TypeDefinition,
)
from nwb_schema_language import Dataset, Attribute, Schema, Group
from pydantic import BaseModel
from nwb_schema_language import Attribute, Dataset, Group, Schema
# SchemaDefClass = dataclass(SchemaDefinition).__pydantic_model__

View file

@@ -4,11 +4,13 @@ Adapters to linkML classes
from abc import abstractmethod
from typing import List, Optional
from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import CompoundDtype, Dataset, DTypeType, Group, ReferenceDtype
class ClassAdapter(Adapter):

View file

@@ -2,20 +2,17 @@
Adapter for NWB datasets to linkml Classes
"""
import pdb
from typing import Optional, List
import warnings
from abc import abstractmethod
from typing import Optional
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from pydantic import PrivateAttr
from nwb_schema_language import Dataset, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps.naming import camel_to_snake
from nwb_linkml.maps.dtype import flat_to_linkml
from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
from nwb_linkml.maps.dtype import flat_to_linkml
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import Dataset
class DatasetMap(Map):
@@ -415,10 +412,7 @@ def make_arraylike(cls: Dataset, name: Optional[str] = None) -> ClassDefinition:
slots = []
for dims, shape in dims_shape:
# if there is just a single list of possible dimensions, it's required
if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]):
required = True
# if a dim is present in all possible combinations of dims, make it required
elif all([dims in inner_dim for inner_dim in cls.dims]):
if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]) or all([dims in inner_dim for inner_dim in cls.dims]):
required = True
else:
required = False
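The merged condition above matches ruff's SIM114 (combine if branches with identical bodies using "or" — the rule code is inferred from the shape of the change, not from this repo's config). The same pattern in isolation:

def is_required(dim: str, dims_options: list) -> bool:
    # Before (SIM114): two branches assigning the same value
    #   if not any(isinstance(d, list) for d in dims_options):
    #       required = True
    #   elif all(dim in d for d in dims_options):
    #       required = True
    #   else:
    #       required = False
    # After --fix: one combined condition
    return not any(isinstance(d, list) for d in dims_options) or all(
        dim in d for d in dims_options
    )

print(is_required("x", [["x", "y"], ["x"]]))  # True: "x" appears in every option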
@@ -447,7 +441,7 @@ def make_arraylike(cls: Dataset, name: Optional[str] = None) -> ClassDefinition:
elif cls.name:
name = cls.name
else:
raise ValueError(f"Dataset has no name or type definition, what do call it?")
raise ValueError("Dataset has no name or type definition, what do call it?")
name = "__".join([name, "Arraylike"])

View file

@@ -2,16 +2,14 @@
Adapter for NWB groups to linkml Classes
"""
import pdb
from typing import List
from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from linkml_runtime.linkml_model import SlotDefinition
from nwb_schema_language import Dataset, Group, ReferenceDtype, CompoundDtype, DTypeType
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps.naming import camel_to_snake
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import Group
class GroupAdapter(ClassAdapter):

View file

@@ -5,26 +5,21 @@ Wraps the :class:`nwb_schema_language.Namespaces` and other objects with conveni
for extracting information and generating translated schema
"""
import pdb
from typing import List, Optional, Dict
from pathlib import Path
from pydantic import BaseModel, Field, validator, PrivateAttr
from pprint import pformat
from linkml_runtime.linkml_model import SchemaDefinition, Annotation
from linkml_runtime.dumpers import yaml_dumper
from time import sleep
from copy import copy
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
from nwb_schema_language import Namespaces
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import Annotation, SchemaDefinition
from pydantic import Field, PrivateAttr
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.lang_elements import NwbLangSchema
from nwb_linkml.ui import AdapterProgress
from nwb_schema_language import Namespaces
class NamespacesAdapter(Adapter):

View file

@@ -3,18 +3,16 @@ I don't know if NWB necessarily has a term for a single nwb schema file, so we'r
to call them "schema" objects
"""
from typing import Optional, List, TYPE_CHECKING, Type
from pathlib import Path
from typing import List, NamedTuple, Optional, Type
from linkml_runtime.linkml_model import SchemaDefinition
from pydantic import Field, PrivateAttr
from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.dataset import DatasetAdapter
from nwb_linkml.adapters.group import GroupAdapter
from nwb_schema_language import Group, Dataset
from typing import NamedTuple
from linkml_runtime.linkml_model import SchemaDefinition
from nwb_schema_language import Dataset, Group
class SplitSchema(NamedTuple):

View file

@@ -3,15 +3,14 @@ Manage the operation of nwb_linkml from environmental variables
"""
import tempfile
from typing import Any
from pathlib import Path
from pydantic import (
Field,
DirectoryPath,
Field,
FieldValidationInfo,
computed_field,
field_validator,
model_validator,
FieldValidationInfo,
)
from pydantic_settings import BaseSettings, SettingsConfigDict

View file

@@ -25,43 +25,38 @@ The `serialize` method:
"""
import pdb
import re
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Dict, Set, Tuple, Optional, TypedDict, Type
import os, sys
from types import ModuleType
from copy import deepcopy, copy
import warnings
import inspect
import sys
import warnings
from copy import copy
from dataclasses import dataclass
from pathlib import Path
from types import ModuleType
from typing import Dict, List, Optional, Tuple, Type
from nwb_linkml.maps import flat_to_nptyping
from jinja2 import Template
from linkml.generators import PydanticGenerator
from linkml.generators.common.type_designators import (
get_type_designator_value,
)
from linkml.utils.ifabsent_functions import ifabsent_value_declaration
from linkml_runtime.linkml_model.meta import (
Annotation,
ClassDefinition,
ClassDefinitionName,
ElementName,
SchemaDefinition,
SlotDefinition,
SlotDefinitionName,
TypeDefinition,
ElementName,
)
from linkml.generators.common.type_designators import (
get_accepted_type_designator_values,
get_type_designator_value,
)
from linkml_runtime.utils.compile_python import file_text
from linkml_runtime.utils.formatutils import camelcase, underscore
from linkml_runtime.utils.schemaview import SchemaView
from linkml_runtime.utils.compile_python import file_text
from linkml.utils.ifabsent_functions import ifabsent_value_declaration
from nwb_linkml.maps.naming import module_case, version_module_case
from jinja2 import Template
from pydantic import BaseModel
from nwb_linkml.maps import flat_to_nptyping
from nwb_linkml.maps.naming import module_case, version_module_case
class LinkML_Meta(BaseModel):
"""Extra LinkML Metadata stored as a class attribute"""

View file

@@ -19,29 +19,26 @@ Other TODO:
"""
import pdb
import json
import os
import shutil
import subprocess
import warnings
from typing import Optional, Dict, overload, Type, Union, List
from pathlib import Path
from types import ModuleType
from typing import TYPE_CHECKING, NamedTuple
import json
import subprocess
import shutil
import os
from typing import TYPE_CHECKING, Dict, List, Optional, Union, overload
import h5py
import numpy as np
from pydantic import BaseModel
from tqdm import tqdm
import numpy as np
from nwb_linkml.maps.hdf5 import H5SourceItem, flatten_hdf, ReadPhases, ReadQueue
from nwb_linkml.maps.hdf5 import ReadPhases, ReadQueue, flatten_hdf
# from nwb_linkml.models.core_nwb_file import NWBFile
if TYPE_CHECKING:
from nwb_linkml.models import NWBFile
from nwb_linkml.providers.schema import SchemaProvider
from nwb_linkml.types.hdf5 import HDF5_Path
class HDF5IO:
@@ -191,7 +188,7 @@ def read_specs_as_dicts(group: h5py.Group) -> dict:
if isinstance(node, h5py.Dataset):
# make containing dicts if they don't exist
pieces = node.name.split("/")
if pieces[-3] not in spec_dict.keys():
if pieces[-3] not in spec_dict:
spec_dict[pieces[-3]] = {}
spec = json.loads(node[()])
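The membership test above is ruff's SIM118: "key in d" already checks a dict's keys, so the explicit .keys() call is dropped (the same fix appears in several hunks below for "in completed.keys()"). Sketch:

spec_dict: dict = {"core": {}}
key = "hdmf-common"
# Before (SIM118): if key not in spec_dict.keys(): ...
# After --fix: membership on the dict itself is equivalent
if key not in spec_dict:
    spec_dict[key] = {}
print(spec_dict)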

View file

@@ -3,24 +3,24 @@ Loading/saving NWB Schema yaml files
"""
from pathlib import Path
from typing import Optional
from pprint import pprint
from typing import Optional
from linkml_runtime.loaders import yaml_loader
import yaml
from linkml_runtime.loaders import yaml_loader
from nwb_schema_language import Namespaces, Group, Dataset
from nwb_linkml.providers.git import NamespaceRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
from nwb_linkml.maps.postload import PHASES, KeyMap, apply_postload
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.maps.postload import apply_postload
from nwb_linkml.providers.git import HDMF_COMMON_REPO, NWB_CORE_REPO, NamespaceRepo
from nwb_schema_language import Dataset, Group, Namespaces
def load_yaml(path: Path | str) -> dict:
if isinstance(path, str) and not Path(path).exists():
ns_dict = yaml.safe_load(path)
else:
with open(path, "r") as file:
with open(path) as file:
ns_dict = yaml.safe_load(file)
ns_dict = apply_postload(ns_dict)
return ns_dict
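The open() change is pyupgrade's UP015: "r" is already the default mode, so ruff drops it. A sketch with a throwaway file (the file name is made up for illustration):

import tempfile
from pathlib import Path

path = Path(tempfile.mkdtemp()) / "example.yaml"  # hypothetical file
path.write_text("a: 1\n")
# Before (UP015): with open(path, "r") as file:
# After --fix: the redundant default mode argument is gone
with open(path) as file:
    print(file.read())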
@@ -108,7 +108,7 @@ def load_namespace_adapter(
elif isinstance(namespace, Namespaces):
namespaces = namespace
else:
raise ValueError(f"Namespace must be a path, namespace repo, or already loaded namespaces")
raise ValueError("Namespace must be a path, namespace repo, or already loaded namespaces")
if path.is_file():
# given the namespace file itself, so find paths relative to its directory

View file

@@ -3,18 +3,17 @@ Language elements in nwb schema language that have a fixed, alternative represen
in LinkML. These are exported as an nwb.language.yml file along with every generated namespace
"""
from nwb_schema_language.datamodel.nwb_schema_pydantic import FlatDtype as FlatDtype_source
from linkml_runtime.linkml_model import (
ClassDefinition,
EnumDefinition,
SchemaDefinition,
SlotDefinition,
TypeDefinition,
Prefix,
PermissibleValue,
Prefix,
SchemaDefinition,
TypeDefinition,
)
from nwb_linkml.maps import flat_to_linkml
from nwb_linkml.maps import flat_to_linkml
from nwb_schema_language.datamodel.nwb_schema_pydantic import FlatDtype as FlatDtype_source
FlatDType = EnumDefinition(
name="FlatDType",

View file

@@ -1,5 +1,5 @@
# Import everything so it's defined, but it shouldn't necessarily be used from here
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_nptyping
from nwb_linkml.maps.map import Map
from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC
from nwb_linkml.maps.quantity import QUANTITY_MAP
from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_nptyping

View file

@@ -1,7 +1,8 @@
import numpy as np
from typing import Any, Type
from datetime import datetime
from typing import Any, Type
import nptyping
import numpy as np
flat_to_linkml = {
"float": "float",

View file

@@ -6,23 +6,21 @@ so we will make our own mapping class here and re-evaluate whether they should b
"""
import datetime
import pdb
from abc import abstractmethod
from pathlib import Path
from typing import Literal, List, Dict, Optional, Type, Union, Tuple
import inspect
from abc import abstractmethod
from enum import StrEnum
from pathlib import Path
from typing import Dict, List, Literal, Optional, Tuple, Type, Union
import h5py
from enum import StrEnum
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, Field, ConfigDict
from nwb_linkml.providers.schema import SchemaProvider
from nwb_linkml.annotations import unwrap_optional
from nwb_linkml.maps import Map
from nwb_linkml.maps.hdmf import dynamictable_to_model
from nwb_linkml.providers.schema import SchemaProvider
from nwb_linkml.types.hdf5 import HDF5_Path
from nwb_linkml.types.ndarray import NDArrayProxy
from nwb_linkml.annotations import unwrap_optional
class ReadPhases(StrEnum):
@@ -548,7 +546,7 @@ class CompleteContainerGroups(HDF5Map):
src.model is None
and src.neurodata_type is None
and src.source.h5_type == "group"
and all([depend in completed.keys() for depend in src.depends])
and all([depend in completed for depend in src.depends])
):
return True
else:
@@ -580,7 +578,7 @@ class CompleteModelGroups(HDF5Map):
src.model is not None
and src.source.h5_type == "group"
and src.neurodata_type != "NWBFile"
and all([depend in completed.keys() for depend in src.depends])
and all([depend in completed for depend in src.depends])
):
return True
else:
@@ -642,7 +640,7 @@ class CompleteNWBFile(HDF5Map):
cls, src: H5ReadResult, provider: SchemaProvider, completed: Dict[str, H5ReadResult]
) -> bool:
if src.neurodata_type == "NWBFile" and all(
[depend in completed.keys() for depend in src.depends]
[depend in completed for depend in src.depends]
):
return True
else:
@@ -661,7 +659,7 @@ class CompleteNWBFile(HDF5Map):
datetime.datetime.fromisoformat(ts.decode("utf-8"))
for ts in res["file_create_date"]["array"][:]
]
if "stimulus" not in res.keys():
if "stimulus" not in res:
res["stimulus"] = provider.get_class("core", "NWBFileStimulus")()
electrode_groups = []
egroup_keys = list(res["general"].get("extracellular_ephys", {}).keys())
@@ -830,8 +828,8 @@ def flatten_hdf(h5f: h5py.File | h5py.Group, skip="specifications") -> Dict[str,
# depends = depends,
h5_type=h5_type,
attrs=attrs,
namespace=attrs.get("namespace", None),
neurodata_type=attrs.get("neurodata_type", None),
namespace=attrs.get("namespace"),
neurodata_type=attrs.get("neurodata_type"),
)
h5f.visititems(_itemize)
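attrs.get("namespace", None) becomes attrs.get("namespace") via ruff's SIM910 (rule code assumed from the change): dict.get already returns None for a missing key. Sketch:

attrs = {"neurodata_type": "TimeSeries"}
# Before (SIM910): namespace = attrs.get("namespace", None)
# After --fix: the explicit None default is redundant
namespace = attrs.get("namespace")
neurodata_type = attrs.get("neurodata_type")
print(namespace, neurodata_type)  # None TimeSeries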
@@ -895,7 +893,7 @@ def resolve_references(
res = {}
for path, item in src.items():
if isinstance(item, HDF5_Path):
other_item = completed.get(item, None)
other_item = completed.get(item)
if other_item is None:
errors.append(f"Couldn't find: {item}")
res[path] = other_item.result

View file

@@ -2,19 +2,16 @@
Mapping functions for handling HDMF classes like DynamicTables
"""
from typing import List, Type, Optional, Any
import warnings
from typing import Any, List, Optional, Type
import h5py
import nptyping
from pydantic import create_model, BaseModel
import numpy as np
import dask.array as da
import h5py
import numpy as np
from pydantic import BaseModel, create_model
from nwb_linkml.maps.dtype import struct_from_dtype
from nwb_linkml.types.hdf5 import HDF5_Path
from nwb_linkml.types.ndarray import NDArray, NDArrayProxy
from nwb_linkml.maps.dtype import flat_to_nptyping, struct_from_dtype
def model_from_dynamictable(group: h5py.Group, base: Optional[BaseModel] = None) -> Type[BaseModel]:

View file

@@ -1,5 +1,5 @@
from typing import Any
from abc import ABC, abstractmethod
from typing import Any
class Map(ABC):

View file

@@ -1,4 +1,3 @@
import pdb
import re
from pathlib import Path
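The deleted import pdb here (and in many of the files above) is ruff's F401 fix: imports nothing references get removed, and pdb was presumably only ever used for temporary breakpoints. Sketch:

# Before (F401): pdb was imported but never used
# import pdb
import re

# Only re is referenced, so only re survives --fix
print(re.sub(r"\s+", "_", "a b  c"))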
@@ -53,14 +52,14 @@ def relative_path(target: Path, origin: Path):
def _relative_path(target: Path, origin: Path):
try:
return Path(target).resolve().relative_to(Path(origin).resolve())
except ValueError as e: # target does not start with origin
except ValueError: # target does not start with origin
# recursion with origin (eventually origin is root so try will succeed)
return Path("..").joinpath(_relative_path(target, Path(origin).parent))
try:
successful = Path(target).resolve().relative_to(Path(origin).resolve())
return successful
except ValueError as e: # target does not start with origin
except ValueError: # target does not start with origin
# recursion with origin (eventually origin is root so try will succeed)
relative = Path("..").joinpath(_relative_path(target, Path(origin).parent))
# remove the first '..' because this thing freaking double counts
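Dropping "as e" from both except clauses is ruff removing an exception binding the handlers never read (F841, unused local variable — assumed from the shape of the change). The same pattern:

from pathlib import Path

def is_relative(target: Path, origin: Path) -> bool:
    try:
        target.resolve().relative_to(origin.resolve())
        return True
    # Before (F841): except ValueError as e:  -- e was never used
    except ValueError:  # target does not start with origin
        return False

print(is_relative(Path("/tmp/a"), Path("/tmp")))   # True
print(is_relative(Path("/tmp/a"), Path("/home")))  # False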

View file

@@ -2,13 +2,11 @@
Maps to change the loaded .yaml from nwb schema before it's given to the nwb_schema_language models
"""
import ast
import re
from dataclasses import dataclass
from enum import StrEnum
from typing import Optional, ClassVar, List
import re
import ast
from nwb_linkml.maps import Map
from typing import ClassVar, List, Optional
class SCOPE_TYPES(StrEnum):

View file

@@ -12,13 +12,12 @@ def patch_npytyping_perf():
References:
- https://github.com/ramonhagenaars/nptyping/issues/110
"""
from nptyping import ndarray
from nptyping.pandas_ import dataframe
from nptyping import recarray
from nptyping import base_meta_classes
import inspect
from types import FrameType
from nptyping import base_meta_classes, ndarray, recarray
from nptyping.pandas_ import dataframe
# make new __module__ methods for the affected classes
def new_module_ndarray(cls) -> str:
return cls._get_module(inspect.currentframe(), "nptyping.ndarray")
@@ -63,13 +62,13 @@ def patch_schemaview():
Returns:
"""
from typing import List
from functools import lru_cache
from linkml_runtime.utils.schemaview import SchemaView
from typing import List
from linkml_runtime.linkml_model import SchemaDefinitionName
from linkml_runtime.utils.schemaview import SchemaView
@lru_cache()
@lru_cache
def imports_closure(
self, imports: bool = True, traverse=True, inject_metadata=True
) -> List[SchemaDefinitionName]:
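@lru_cache() to @lru_cache is pyupgrade's UP011: since Python 3.8 the decorator works without the empty call. Sketch:

from functools import lru_cache

# Before (UP011): @lru_cache() with empty parentheses
# After --fix: bare decorator, identical behavior
@lru_cache
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(20))  # 6765; repeat calls for the same n come from the cache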

View file

@@ -2,17 +2,17 @@
Various visualization routines, mostly to help development for now
"""
from typing import TYPE_CHECKING, Optional, List, TypedDict
from rich import print
import random
from typing import TYPE_CHECKING, List, Optional, TypedDict
from dash import Dash, html
import dash_cytoscape as cyto
from dash import Dash, html
from rich import print
cyto.load_extra_layouts()
from nwb_schema_language import Namespace, Group, Dataset
from nwb_linkml.io import load_nwb_core
from nwb_schema_language import Dataset, Group, Namespace
if TYPE_CHECKING:
from nwb_linkml.adapters import NamespacesAdapter

View file

@@ -1 +1 @@
from nwb_linkml.providers.schema import LinkMLProvider, SchemaProvider, PydanticProvider
from nwb_linkml.providers.schema import LinkMLProvider, PydanticProvider, SchemaProvider

View file

@@ -2,15 +2,14 @@
Define and manage NWB namespaces in external repositories
"""
import pdb
from typing import Optional, Dict, List
import shutil
import subprocess
import tempfile
import warnings
from pathlib import Path
import tempfile
import subprocess
import shutil
from typing import List, Optional
from pydantic import BaseModel, HttpUrl, FilePath, DirectoryPath, Field
from pydantic import BaseModel, DirectoryPath, Field, HttpUrl
from nwb_linkml.config import Config

View file

@@ -41,35 +41,29 @@ so eg. for the linkML and pydantic providers:
"""
import pdb
import shutil
from pprint import pformat
from typing import Dict, TypedDict, List, Optional, Literal, TypeVar, Any, Dict, Type, Callable
from types import ModuleType
from pathlib import Path
import os
from abc import abstractmethod, ABC
from importlib.abc import MetaPathFinder
import warnings
import importlib
import os
import shutil
import sys
from abc import ABC, abstractmethod
from importlib.abc import MetaPathFinder
from pathlib import Path
from types import ModuleType
from typing import Any, Dict, List, Optional, Type, TypedDict, TypeVar
from linkml_runtime import SchemaView
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import SchemaDefinition, SchemaDefinitionName
from pydantic import BaseModel
from linkml_runtime.linkml_model import SchemaDefinition, SchemaDefinitionName
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime import SchemaView
from nwb_linkml.config import Config
from nwb_linkml import io
from nwb_linkml import adapters
from nwb_linkml import adapters, io
from nwb_linkml.adapters.adapter import BuildResult
from nwb_linkml.maps.naming import module_case, version_module_case, relative_path
from nwb_schema_language import Namespaces
from nwb_linkml.config import Config
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.maps.naming import module_case, relative_path, version_module_case
from nwb_linkml.providers.git import DEFAULT_REPOS
from nwb_linkml.ui import AdapterProgress
from nwb_schema_language import Namespaces
P = TypeVar("P")
@@ -212,7 +206,7 @@ class Provider(ABC):
for ns_dir in builtin_namespaces + tmp_paths:
if not ns_dir.is_dir():
continue
if ns_dir.name not in versions.keys():
if ns_dir.name not in versions:
versions[ns_dir.name] = []
versions[ns_dir.name].extend([v for v in ns_dir.iterdir() if v.is_dir()])
@@ -589,7 +583,7 @@ class PydanticProvider(Provider):
def _build_unsplit(self, path, versions, default_kwargs, dump, out_file, force):
if out_file.exists() and not force:
with open(out_file, "r") as ofile:
with open(out_file) as ofile:
serialized = ofile.read()
return serialized

View file

@@ -13,22 +13,20 @@ Pydantic models that behave like pandas dataframes
"""
import ast
import pdb
from typing import List, Any, get_origin, get_args, Union, Optional, Dict, Type
from types import NoneType
from typing import Any, Dict, Optional, Type
import h5py
import numpy as np
import pandas as pd
from pydantic import (
BaseModel,
model_serializer,
SerializerFunctionWrapHandler,
ConfigDict,
SerializerFunctionWrapHandler,
model_serializer,
model_validator,
)
from nwb_linkml.maps.hdmf import model_from_dynamictable, dereference_reference_vector
from nwb_linkml.maps.hdmf import dereference_reference_vector, model_from_dynamictable
from nwb_linkml.types.hdf5 import HDF5_Path

View file

@@ -1,6 +1,7 @@
from typing import Any
from pydantic_core import CoreSchema, core_schema
from pydantic import GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema
class HDF5_Path(str):

View file

@@ -5,31 +5,23 @@ Extension of nptyping NDArray for pydantic that allows for JSON-Schema serializa
"""
import base64
import pdb
from pathlib import Path
from typing import Any, Callable, Tuple
import sys
from copy import copy
from functools import reduce
from operator import or_
from pathlib import Path
from typing import Any, Callable
import nptyping.structure
from pydantic_core import core_schema
from pydantic import BaseModel, GetJsonSchemaHandler, ValidationError, GetCoreSchemaHandler
from pydantic.json_schema import JsonSchemaValue
import numpy as np
import h5py
from dask.array.core import Array as DaskArray
import blosc2
import h5py
import nptyping.structure
import numpy as np
from dask.array.core import Array as DaskArray
from nptyping import NDArray as _NDArray
from nptyping.ndarray import NDArrayMeta as _NDArrayMeta
from nptyping import Shape, Number
from nptyping.nptyping_type import NPTypingType
from nptyping.shape_expression import check_shape
from pydantic_core import core_schema
from nwb_linkml.maps.dtype import np_to_python, allowed_precisions
from nwb_linkml.maps.dtype import allowed_precisions, np_to_python
def _list_of_lists_schema(shape, array_type_handler):
@@ -196,7 +188,7 @@ class NDArrayProxy:
return obj[slice]
def __setitem__(self, slice, value):
raise NotImplementedError(f"Can't write into an arrayproxy yet!")
raise NotImplementedError("Can't write into an arrayproxy yet!")
@classmethod
def __get_pydantic_core_schema__(

View file

@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING
from rich.live import Live
from rich.panel import Panel
from rich.progress import Progress, SpinnerColumn, BarColumn, TextColumn, Column
from rich.progress import BarColumn, Column, Progress, SpinnerColumn, TextColumn
if TYPE_CHECKING:
from nwb_linkml.adapters.namespaces import NamespacesAdapter
@@ -42,7 +42,7 @@ class AdapterProgress:
)
self.panel = Panel(
self.progress, title=f"Building Namespaces", border_style="green", padding=(2, 2)
self.progress, title="Building Namespaces", border_style="green", padding=(2, 2)
)
def update(self, namespace: str, **kwargs):

View file

@@ -1,22 +1,21 @@
import pytest
import os
from typing import NamedTuple, Optional, List, Dict
import shutil
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Optional
import pytest
from linkml_runtime.dumpers import yaml_dumper
from nwb_linkml.io import schema as io
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_schema_language import Schema, Group, Dataset, Attribute
from linkml_runtime.linkml_model import (
SchemaDefinition,
ClassDefinition,
SlotDefinition,
Prefix,
SchemaDefinition,
SlotDefinition,
TypeDefinition,
)
import shutil
from pathlib import Path
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.io import schema as io
from nwb_schema_language import Attribute, Dataset, Group
@pytest.fixture(scope="session")

View file

@@ -1,20 +1,15 @@
import pdb
import numpy as np
import pytest
from ..fixtures import nwb_core_fixture
from linkml_runtime.linkml_model import (
SchemaDefinition,
ClassDefinition,
SchemaDefinition,
SlotDefinition,
TypeDefinition,
)
from nwb_schema_language import Dataset, Group, Schema, CompoundDtype, Attribute
from nwb_linkml.adapters import BuildResult
from ..fixtures import linkml_schema_bare
from nwb_schema_language import Attribute, Dataset, Group, Schema
def test_walk(nwb_core_fixture):

View file

@@ -1,12 +1,9 @@
import pdb
import pytest
from ..fixtures import linkml_schema_bare, linkml_schema, nwb_schema
from linkml_runtime.linkml_model import SlotDefinition
from nwb_linkml.adapters import DatasetAdapter, ClassAdapter, GroupAdapter
from nwb_schema_language import Group, Dataset, ReferenceDtype, CompoundDtype
from nwb_linkml.adapters import DatasetAdapter, GroupAdapter
from nwb_schema_language import CompoundDtype, Dataset, Group, ReferenceDtype
def test_build_base(nwb_schema):

View file

@@ -1,22 +1,11 @@
import pdb
import pytest
from ..fixtures import nwb_core_fixture
from nwb_schema_language import Namespaces, Namespace, Dataset, Group, Schema
from linkml_runtime.dumpers import yaml_dumper
import yaml
from nwb_linkml.adapters import DatasetAdapter
from nwb_linkml.adapters.dataset import (
MapScalar,
MapListlike,
MapArraylike,
MapNVectors,
Map1DVector,
MapScalarAttributes,
MapArrayLikeAttributes,
)
from nwb_schema_language import Dataset
def test_nothing(nwb_core_fixture):

View file

@@ -1,3 +1 @@
import pytest
from ..fixtures import nwb_core_fixture

View file

@@ -1,8 +1,6 @@
import pdb
import pytest
from ..fixtures import nwb_core_fixture
from nwb_schema_language import Dataset, Group
from nwb_linkml.adapters import SchemaAdapter

View file

@@ -1,7 +1,4 @@
import pytest
from ..fixtures import nwb_core_fixture
from nwb_schema_language import Dataset, Group, Schema
@pytest.mark.parametrize(["schema_name"], [["core.nwb.file"]])

View file

@@ -1,7 +1,8 @@
from types import NoneType
from typing import List
import pytest
from typing import List, Optional, Union
from types import NoneType
from nwb_linkml.annotations import get_inner_types

View file

@@ -1,8 +1,7 @@
import pytest
import tempfile
from pathlib import Path
import os
import shutil
import tempfile
from pathlib import Path
from nwb_linkml.config import Config

View file

@@ -6,19 +6,15 @@ to ensure that the basics of the whole thing operate -- not doing any actual dat
here.
"""
import pdb
from pathlib import Path
from typing import Dict
import pytest
import warnings
from .fixtures import nwb_core_fixture, tmp_output_dir
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import SchemaDefinition
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from linkml_runtime.loaders.yaml_loader import YAMLLoader
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.lang_elements import NwbLangSchema

View file

@@ -5,29 +5,23 @@ Note that since this is largely a subclass, we don't test all of the functionali
because it's tested in the base linkml package.
"""
import pdb
import re
import sys
import typing
import pytest
from typing import TypedDict, Optional
from types import ModuleType
import re
from typing import Optional, TypedDict
import numpy as np
import pytest
from pydantic import BaseModel
from ..fixtures import (
tmp_output_dir,
tmp_output_dir_mod,
linkml_schema,
TestSchemas,
linkml_schema_bare,
)
from nwb_linkml.generators.pydantic import NWBPydanticGenerator, compile_python
from nwb_linkml.types.ndarray import NDArrayMeta
from ..fixtures import (
TestSchemas,
)
class TestModules(TypedDict):
core: ModuleType

View file

@@ -1,14 +1,10 @@
import pdb
import h5py
import pytest
from pathlib import Path
import numpy as np
import pytest
from ..fixtures import tmp_output_dir, data_dir
from nwb_linkml.io.hdf5 import HDF5IO
from nwb_linkml.io.hdf5 import truncate_file
from nwb_linkml.io.hdf5 import HDF5IO, truncate_file
@pytest.mark.xfail()
@@ -92,7 +88,6 @@ def test_truncate_file(tmp_output_dir):
@pytest.mark.skip()
def test_flatten_hdf():
from nwb_linkml.io.hdf5 import HDF5IO
from nwb_linkml.maps.hdf5 import flatten_hdf
path = "/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb"

View file

@@ -1,5 +1,6 @@
import numpy as np
import nptyping
import numpy as np
from nwb_linkml.maps.dtype import struct_from_dtype

View file

@@ -1,11 +1,10 @@
import pdb
import pytest
import h5py
import time
from nwb_linkml.maps.hdmf import model_from_dynamictable, dynamictable_to_model
from ..fixtures import data_dir
import h5py
import pytest
from nwb_linkml.maps.hdmf import dynamictable_to_model, model_from_dynamictable
NWBFILE = "/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb"

View file

@@ -1,9 +1,10 @@
import pytest
import os
import tempfile
from pathlib import Path
import yaml
from yaml import CDumper as Dumper
from pathlib import Path
from nwb_linkml.io.schema import load_yaml

View file

@@ -1,8 +1,9 @@
import pytest
import shutil
import pytest
import yaml
from nwb_linkml.providers.git import GitRepo, NWB_CORE_REPO, HDMF_COMMON_REPO
from nwb_linkml.providers.git import HDMF_COMMON_REPO, NWB_CORE_REPO, GitRepo
from nwb_schema_language import Namespaces
@@ -34,7 +35,7 @@ def test_gitrepo(source, commit):
repo.clone()
# check that the namespace file exists and has some expected fields
assert repo.namespace_file.exists()
with open(repo.namespace_file, "r") as nsfile:
with open(repo.namespace_file) as nsfile:
ns = yaml.safe_load(nsfile)
# correct model instantiation confirms the repo was cloned successfully
ns_model = Namespaces(**ns)

View file

@@ -1,25 +1,16 @@
import pdb
import shutil
import os
import sys
import warnings
from pathlib import Path
import yaml
from pprint import pformat
from typing import Optional, Union, List
from ..fixtures import tmp_output_dir
from typing import Optional
import pytest
from nptyping import Shape, UByte
from nwb_linkml.providers.schema import LinkMLProvider, PydanticProvider
import nwb_linkml
from nwb_linkml.maps.naming import version_module_case
from nwb_linkml.providers.git import DEFAULT_REPOS
from nwb_linkml.adapters import NamespacesAdapter
from nwb_linkml.providers.schema import LinkMLProvider, PydanticProvider
from nwb_linkml.types.ndarray import NDArray
from nptyping import Shape, UByte
CORE_MODULES = (
"core.nwb.base",

View file

@@ -1,7 +1,7 @@
import pytest
from typing import List, Optional
import pytest
from pydantic import BaseModel, ValidationError
from typing import List, Union, Optional
@pytest.mark.skip()
@@ -10,6 +10,7 @@ def test_df():
Dataframe class should behave like both a pydantic model and a dataframe
"""
import pandas as pd
from nwb_linkml.types.df import DataFrame
class MyDf(DataFrame):

View file

@@ -1,17 +1,13 @@
import pdb
from typing import Union, Optional, Any
import json
from typing import Any, Optional, Union
import pytest
import numpy as np
import h5py
import numpy as np
import pytest
from nptyping import Number, Shape
from pydantic import BaseModel, Field, ValidationError
from pydantic import BaseModel, ValidationError, Field
from nwb_linkml.types.ndarray import NDArray, NDArrayProxy
from nptyping import Shape, Number
from ..fixtures import data_dir, tmp_output_dir, tmp_output_dir_func
def test_ndarray_type():

View file

@@ -3,16 +3,16 @@ from typing import List, Union
try:
from .datamodel.nwb_schema_pydantic import (
Attribute,
CompoundDtype,
Dataset,
FlatDtype,
Group,
Link,
Namespace,
Namespaces,
Schema,
Group,
Attribute,
Link,
Dataset,
ReferenceDtype,
CompoundDtype,
FlatDtype,
Schema,
)
DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype]

View file

@@ -1,4 +1,4 @@
from importlib.metadata import version, PackageNotFoundError
from importlib.metadata import PackageNotFoundError, version
try:
__version__ = version(__name__)

View file

@@ -1,4 +1,5 @@
from pathlib import Path
from linkml_runtime.utils.schemaview import SchemaView
SCHEMA_FILE = Path(__file__).parent.parent.resolve() / "schema" / "nwb_schema_language.yaml"

View file

@@ -2,13 +2,13 @@
Patching the source code at different stages of the code generation process
"""
import argparse
import pprint
import re
from dataclasses import dataclass
from enum import StrEnum
from pathlib import Path
from typing import ClassVar, List
import re
import argparse
import pprint
class Phases(StrEnum):
@@ -89,7 +89,7 @@ def run_patches(phase: Phases, verbose: bool = False):
if verbose:
print("Patching:")
pprint.pprint(patch)
with open(patch.path, "r") as pfile:
with open(patch.path) as pfile:
string = pfile.read()
string = re.sub(patch.match, patch.replacement, string)
with open(patch.path, "w") as pfile: