enable black preview and automatic linebreaks in long strings

sneakers-the-rat 2024-07-01 22:12:16 -07:00
parent d4a0c82d00
commit 3768e3ce0d
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
18 changed files with 51 additions and 33 deletions
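
Almost every hunk below is the same mechanical change: a string literal that overflows the configured line length is split into adjacent literals wrapped in parentheses, which Python joins back into a single string at compile time, so runtime behavior is unchanged. A minimal sketch of the pattern, using a hypothetical message rather than one from this repository:

name = "core"  # hypothetical value, for illustration only

# What such code looked like before this commit: one overlong literal
before = f"Could not resolve namespace {name}, checked all repositories and local caches without finding a match"

# What Black's string_processing feature produces: adjacent literals in parentheses.
# Adjacent string constants are concatenated at compile time, so the value is identical.
after = (
    f"Could not resolve namespace {name}, checked all repositories and local caches"
    " without finding a match"
)

assert before == after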

@@ -1,12 +1,11 @@
"""
NWB in LinkML
"""
from nwb_linkml.monkeypatch import apply_patches
apply_patches()
from nwb_linkml.config import Config # noqa: E402
__all__ = [
"Config"
]
__all__ = ["Config"]

@@ -30,14 +30,15 @@ from nwb_schema_language import Attribute, Dataset, Group, Schema
T = TypeVar("T", Dataset, Attribute, Schema, Group, BaseModel)
Ts = TypeVarTuple("Ts")
Td = TypeVar('Td', bound=Union[Definition,SchemaDefinition,TypeDefinition])
Td = TypeVar("Td", bound=Union[Definition, SchemaDefinition, TypeDefinition])
@dataclass
class BuildResult:
"""
Container class for propagating nested build results back up to caller
"""
# pass
schemas: List[SchemaDefinition] = field(default_factory=list)
classes: List[ClassDefinition] = field(default_factory=list)
slots: List[SlotDefinition] = field(default_factory=list)

@@ -365,7 +365,8 @@ class DatasetAdapter(ClassAdapter):
if len(matches) > 1: # pragma: no cover
raise RuntimeError(
f"Only one map should apply to a dataset, you need to refactor the maps! Got maps: {matches}"
"Only one map should apply to a dataset, you need to refactor the maps! Got maps:"
f" {matches}"
)
# apply matching maps
@@ -412,7 +413,9 @@ def make_arraylike(cls: Dataset, name: Optional[str] = None) -> ClassDefinition:
slots = []
for dims, shape in dims_shape:
# if there is just a single list of possible dimensions, it's required
if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]) or all([dims in inner_dim for inner_dim in cls.dims]):
if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]) or all(
[dims in inner_dim for inner_dim in cls.dims]
):
required = True
else:
required = False

@@ -5,7 +5,6 @@ Wraps the :class:`nwb_schema_language.Namespaces` and other objects with conveni
for extracting information and generating translated schema
"""
from copy import copy
from pathlib import Path
from pprint import pformat
@@ -154,7 +153,8 @@ class NamespacesAdapter(Adapter):
if len(internal_matches) > 1:
raise KeyError(
f"Found multiple schemas in namespace that define {name}:\ninternal: {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
f"Found multiple schemas in namespace that define {name}:\ninternal:"
f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
)
elif len(internal_matches) == 1:
return internal_matches[0]
@@ -168,7 +168,8 @@ class NamespacesAdapter(Adapter):
if len(import_matches) > 1:
raise KeyError(
f"Found multiple schemas in namespace that define {name}:\ninternal: {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
f"Found multiple schemas in namespace that define {name}:\ninternal:"
f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
)
elif len(import_matches) == 1:
return import_matches[0]

@@ -34,7 +34,10 @@ class SchemaAdapter(Adapter):
)
version: Optional[str] = Field(
None,
description="Version of schema, populated by NamespacesAdapter since individual schema files dont know their version in NWB Schema Lang",
description=(
"Version of schema, populated by NamespacesAdapter since individual schema files dont"
" know their version in NWB Schema Lang"
),
)
_created_classes: List[Type[Group | Dataset]] = PrivateAttr(default_factory=list)
@@ -81,7 +84,8 @@ class SchemaAdapter(Adapter):
len(res.slots) > 0
): # pragma: no cover - hard to induce this error because the child classes don't fuck up like this
raise RuntimeError(
"Generated schema in this translation can only have classes, all slots should be attributes within a class"
"Generated schema in this translation can only have classes, all slots should be"
" attributes within a class"
)
sch = SchemaDefinition(

@@ -253,7 +253,10 @@ class NWBPydanticGenerator(PydanticGenerator):
SKIP_SLOTS: Tuple[str] = ("",)
SKIP_CLASSES: Tuple[str] = ("",)
INJECTED_FIELDS: Tuple[str] = (
'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object is stored in an NWB file")',
(
'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object'
' is stored in an NWB file")'
),
'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
)
# SKIP_CLASSES=('VectorData','VectorIndex')
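
For context on the INJECTED_FIELDS tuple above: those literal strings are injected verbatim into the model source that the generator emits, so the extra attributes show up on the generated pydantic classes. A minimal sketch of the result, with a hypothetical class name standing in for a generated one:

from typing import Optional
from pydantic import BaseModel, Field

class GeneratedContainer(BaseModel):
    """Hypothetical stand-in for a class emitted by NWBPydanticGenerator."""

    # the two INJECTED_FIELDS strings, as they land in the generated source:
    hdf5_path: Optional[str] = Field(
        None, description="The absolute path that this object is stored in an NWB file"
    )
    object_id: Optional[str] = Field(None, description="Unique UUID for each object")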

@@ -639,9 +639,7 @@ class CompleteNWBFile(HDF5Map):
def check(
cls, src: H5ReadResult, provider: SchemaProvider, completed: Dict[str, H5ReadResult]
) -> bool:
if src.neurodata_type == "NWBFile" and all(
[depend in completed for depend in src.depends]
):
if src.neurodata_type == "NWBFile" and all([depend in completed for depend in src.depends]):
return True
else:
return False

@@ -20,14 +20,20 @@ class NamespaceRepo(BaseModel):
"""
name: str = Field(
description="Short name used to refer to this namespace (usually equivalent to the name field within a namespaces NWB list)"
description=(
"Short name used to refer to this namespace (usually equivalent to the name field"
" within a namespaces NWB list)"
)
)
repository: HttpUrl | DirectoryPath = Field(
description="URL or local absolute path to the root repository"
)
path: Path = Field(description="Relative path from the repository root to the namespace file")
versions: List[str] = Field(
description="Known versions for this namespace repository, correspond to commit hashes or git tags that can be checked out by :class:`.GitRepo`",
description=(
"Known versions for this namespace repository, correspond to commit hashes or git tags"
" that can be checked out by :class:`.GitRepo`"
),
default_factory=list,
)
@@ -248,7 +254,8 @@ class GitRepo:
# Check that the remote matches
if self.remote != str(self.namespace.repository):
warnings.warn(
f'Repository exists, but has the wrong remote URL.\nExpected: {self.namespace.repository}\nGot:{self.remote.strip(".git")}'
"Repository exists, but has the wrong remote URL.\nExpected:"
f" {self.namespace.repository}\nGot:{self.remote.strip('.git')}"
)
return False
@@ -269,7 +276,8 @@ class GitRepo:
or str(self.temp_directory).startswith(str(Config().git_dir))
):
warnings.warn(
"Temp directory is outside of the system temp dir or git directory set by environmental variables, not deleting in case this has been changed by mistake"
"Temp directory is outside of the system temp dir or git directory set by"
" environmental variables, not deleting in case this has been changed by mistake"
)
self._temp_directory = None
return
@@ -293,7 +301,8 @@ class GitRepo:
else:
if not self.check():
warnings.warn(
"Destination directory is not empty and does not pass checks for correctness! cleaning up"
"Destination directory is not empty and does not pass checks for"
" correctness! cleaning up"
)
self.cleanup()
else:

@@ -467,7 +467,8 @@ class LinkMLProvider(Provider):
ns_repo = DEFAULT_REPOS.get(namespace, None)
if ns_repo is None:
raise KeyError(
f"Namespace {namespace} could not be found, and no git repository source has been configured!"
f"Namespace {namespace} could not be found, and no git repository source has been"
" configured!"
)
ns_file = ns_repo.provide_from_git(commit=version)
res = self.build_from_yaml(ns_file)

@@ -117,7 +117,10 @@ def linkml_schema_bare() -> TestSchemas:
),
SlotDefinition(
name="inline_dict",
description="This should be inlined as a dictionary despite this class having an identifier",
description=(
"This should be inlined as a dictionary despite this class having"
" an identifier"
),
multivalued=True,
inlined=True,
inlined_as_list=False,

@@ -1,4 +1,3 @@
import numpy as np
import pytest
from linkml_runtime.linkml_model import (

@@ -1,4 +1,3 @@
import pytest
from linkml_runtime.linkml_model import SlotDefinition

@@ -1,7 +1,3 @@
from nwb_linkml.adapters.dataset import (
MapScalar,
)

@@ -1,4 +1,3 @@
import pytest
from nwb_linkml.adapters import SchemaAdapter

@@ -1,4 +1,3 @@
import time
import h5py

@@ -18,5 +18,6 @@ try:
DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype]
except (NameError, RecursionError):
warnings.warn(
"Error importing pydantic classes, passing because we might be in the process of patching them, but it is likely they are broken and you will be unable to use them!"
"Error importing pydantic classes, passing because we might be in the process of patching"
" them, but it is likely they are broken and you will be unable to use them!"
)

@@ -510,7 +510,9 @@ class ReftypeOptions(EnumDefinitionImpl):
)
region = PermissibleValue(
text="region",
description="Reference to a region (i.e. subset) of another dataset of the given target_type",
description=(
"Reference to a region (i.e. subset) of another dataset of the given target_type"
),
)
_defn = EnumDefinition(

@@ -88,5 +88,6 @@ warn_unreachable = true
[tool.black]
target-version = ['py38', 'py39', 'py310', 'py311']
enable-unstable-feature = ["string_processing"]
preview = true
include = "nwb_linkml/.*\\.py$|nwb_schema_language/.*\\.py$"
line-length = 100
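
The preview and enable-unstable-feature settings are what turn on the automatic string wrapping seen throughout this diff (assuming a Black release recent enough to accept the enable-unstable-feature key), and include restricts formatting to the two packages. A rough sketch of what that include pattern matches, with hypothetical paths and Black's own path normalization glossed over:

import re

# the same pattern as the TOML value above, written as a raw Python string
include = re.compile(r"nwb_linkml/.*\.py$|nwb_schema_language/.*\.py$")

print(bool(include.search("nwb_linkml/src/nwb_linkml/adapters/dataset.py")))       # True
print(bool(include.search("nwb_schema_language/src/nwb_schema_language/dsl.py")))  # True
print(bool(include.search("docs/conf.py")))                                        # False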