enable black preview and automatic linebreaks in long strings

sneakers-the-rat 2024-07-01 22:12:16 -07:00
parent d4a0c82d00
commit 3768e3ce0d
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
18 changed files with 51 additions and 33 deletions
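
The wrapped string literals in the hunks below are what black's unstable string_processing feature produces; this commit enables it by adding preview = true next to the existing enable-unstable-feature = ["string_processing"] entry in pyproject.toml (last hunk). As an illustrative sketch only, not part of the diff, this is the shape of the rewrite black applies to an over-long f-string; the variable and message are stand-ins borrowed from the dataset adapter hunk:

# Illustrative only: how black's preview string_processing rewraps a long literal
# under line-length = 100. "matches" is a made-up value, not the real adapter state.
matches = ["MapScalar", "MapArraylike"]

# Before: one literal that overflows the line limit
message = f"Only one map should apply to a dataset, you need to refactor the maps! Got maps: {matches}"

# After: black splits it into implicitly concatenated pieces, keeping the f-prefix
# only on the piece that interpolates a value
message = (
    "Only one map should apply to a dataset, you need to refactor the maps! Got maps:"
    f" {matches}"
)
print(message)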

View file

@@ -1,12 +1,11 @@
 """
 NWB in LinkML
 """
 from nwb_linkml.monkeypatch import apply_patches
 apply_patches()
 from nwb_linkml.config import Config  # noqa: E402
-__all__ = [
-    "Config"
-]
+__all__ = ["Config"]

View file

@@ -30,14 +30,15 @@ from nwb_schema_language import Attribute, Dataset, Group, Schema
 T = TypeVar("T", Dataset, Attribute, Schema, Group, BaseModel)
 Ts = TypeVarTuple("Ts")
-Td = TypeVar('Td', bound=Union[Definition,SchemaDefinition,TypeDefinition])
+Td = TypeVar("Td", bound=Union[Definition, SchemaDefinition, TypeDefinition])
 @dataclass
 class BuildResult:
     """
     Container class for propagating nested build results back up to caller
     """
+    # pass
     schemas: List[SchemaDefinition] = field(default_factory=list)
     classes: List[ClassDefinition] = field(default_factory=list)
     slots: List[SlotDefinition] = field(default_factory=list)

View file

@@ -365,7 +365,8 @@ class DatasetAdapter(ClassAdapter):
         if len(matches) > 1:  # pragma: no cover
             raise RuntimeError(
-                f"Only one map should apply to a dataset, you need to refactor the maps! Got maps: {matches}"
+                "Only one map should apply to a dataset, you need to refactor the maps! Got maps:"
+                f" {matches}"
             )
         # apply matching maps
@@ -412,7 +413,9 @@ def make_arraylike(cls: Dataset, name: Optional[str] = None) -> ClassDefinition:
     slots = []
     for dims, shape in dims_shape:
         # if there is just a single list of possible dimensions, it's required
-        if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]) or all([dims in inner_dim for inner_dim in cls.dims]):
+        if not any([isinstance(inner_dim, list) for inner_dim in cls.dims]) or all(
+            [dims in inner_dim for inner_dim in cls.dims]
+        ):
             required = True
         else:
             required = False

View file

@@ -5,7 +5,6 @@ Wraps the :class:`nwb_schema_language.Namespaces` and other objects with conveni
 for extracting information and generating translated schema
 """
 from copy import copy
 from pathlib import Path
 from pprint import pformat
@@ -154,7 +153,8 @@ class NamespacesAdapter(Adapter):
         if len(internal_matches) > 1:
             raise KeyError(
-                f"Found multiple schemas in namespace that define {name}:\ninternal: {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
+                f"Found multiple schemas in namespace that define {name}:\ninternal:"
+                f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
             )
         elif len(internal_matches) == 1:
             return internal_matches[0]
@@ -168,7 +168,8 @@ class NamespacesAdapter(Adapter):
         if len(import_matches) > 1:
             raise KeyError(
-                f"Found multiple schemas in namespace that define {name}:\ninternal: {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
+                f"Found multiple schemas in namespace that define {name}:\ninternal:"
+                f" {pformat(internal_matches)}\nimported:{pformat(import_matches)}"
             )
         elif len(import_matches) == 1:
             return import_matches[0]

View file

@@ -34,7 +34,10 @@ class SchemaAdapter(Adapter):
     )
     version: Optional[str] = Field(
         None,
-        description="Version of schema, populated by NamespacesAdapter since individual schema files dont know their version in NWB Schema Lang",
+        description=(
+            "Version of schema, populated by NamespacesAdapter since individual schema files dont"
+            " know their version in NWB Schema Lang"
+        ),
     )
     _created_classes: List[Type[Group | Dataset]] = PrivateAttr(default_factory=list)
@@ -81,7 +84,8 @@ class SchemaAdapter(Adapter):
             len(res.slots) > 0
         ):  # pragma: no cover - hard to induce this error because the child classes don't fuck up like this
             raise RuntimeError(
-                "Generated schema in this translation can only have classes, all slots should be attributes within a class"
+                "Generated schema in this translation can only have classes, all slots should be"
+                " attributes within a class"
             )
         sch = SchemaDefinition(

View file

@@ -253,7 +253,10 @@ class NWBPydanticGenerator(PydanticGenerator):
     SKIP_SLOTS: Tuple[str] = ("",)
     SKIP_CLASSES: Tuple[str] = ("",)
     INJECTED_FIELDS: Tuple[str] = (
-        'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object is stored in an NWB file")',
+        (
+            'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object'
+            ' is stored in an NWB file")'
+        ),
         'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
     )
     # SKIP_CLASSES=('VectorData','VectorIndex')

View file

@@ -639,9 +639,7 @@ class CompleteNWBFile(HDF5Map):
     def check(
         cls, src: H5ReadResult, provider: SchemaProvider, completed: Dict[str, H5ReadResult]
     ) -> bool:
-        if src.neurodata_type == "NWBFile" and all(
-            [depend in completed for depend in src.depends]
-        ):
+        if src.neurodata_type == "NWBFile" and all([depend in completed for depend in src.depends]):
             return True
         else:
             return False

View file

@@ -20,14 +20,20 @@ class NamespaceRepo(BaseModel):
     """
     name: str = Field(
-        description="Short name used to refer to this namespace (usually equivalent to the name field within a namespaces NWB list)"
+        description=(
+            "Short name used to refer to this namespace (usually equivalent to the name field"
+            " within a namespaces NWB list)"
+        )
     )
     repository: HttpUrl | DirectoryPath = Field(
         description="URL or local absolute path to the root repository"
     )
     path: Path = Field(description="Relative path from the repository root to the namespace file")
     versions: List[str] = Field(
-        description="Known versions for this namespace repository, correspond to commit hashes or git tags that can be checked out by :class:`.GitRepo`",
+        description=(
+            "Known versions for this namespace repository, correspond to commit hashes or git tags"
+            " that can be checked out by :class:`.GitRepo`"
+        ),
         default_factory=list,
     )
@@ -248,7 +254,8 @@ class GitRepo:
         # Check that the remote matches
        if self.remote != str(self.namespace.repository):
             warnings.warn(
-                f'Repository exists, but has the wrong remote URL.\nExpected: {self.namespace.repository}\nGot:{self.remote.strip(".git")}'
+                "Repository exists, but has the wrong remote URL.\nExpected:"
+                f" {self.namespace.repository}\nGot:{self.remote.strip('.git')}"
             )
             return False
@@ -269,7 +276,8 @@ class GitRepo:
             or str(self.temp_directory).startswith(str(Config().git_dir))
         ):
             warnings.warn(
-                "Temp directory is outside of the system temp dir or git directory set by environmental variables, not deleting in case this has been changed by mistake"
+                "Temp directory is outside of the system temp dir or git directory set by"
+                " environmental variables, not deleting in case this has been changed by mistake"
             )
             self._temp_directory = None
             return
@@ -293,7 +301,8 @@ class GitRepo:
         else:
             if not self.check():
                 warnings.warn(
-                    "Destination directory is not empty and does not pass checks for correctness! cleaning up"
+                    "Destination directory is not empty and does not pass checks for"
+                    " correctness! cleaning up"
                 )
                 self.cleanup()
             else:

View file

@@ -467,7 +467,8 @@ class LinkMLProvider(Provider):
         ns_repo = DEFAULT_REPOS.get(namespace, None)
         if ns_repo is None:
             raise KeyError(
-                f"Namespace {namespace} could not be found, and no git repository source has been configured!"
+                f"Namespace {namespace} could not be found, and no git repository source has been"
+                " configured!"
             )
         ns_file = ns_repo.provide_from_git(commit=version)
         res = self.build_from_yaml(ns_file)

View file

@@ -117,7 +117,10 @@ def linkml_schema_bare() -> TestSchemas:
                 ),
                 SlotDefinition(
                     name="inline_dict",
-                    description="This should be inlined as a dictionary despite this class having an identifier",
+                    description=(
+                        "This should be inlined as a dictionary despite this class having"
+                        " an identifier"
+                    ),
                     multivalued=True,
                     inlined=True,
                     inlined_as_list=False,

View file

@@ -1,4 +1,3 @@
 import numpy as np
 import pytest
 from linkml_runtime.linkml_model import (

View file

@@ -1,4 +1,3 @@
 import pytest
 from linkml_runtime.linkml_model import SlotDefinition

View file

@@ -1,7 +1,3 @@
 from nwb_linkml.adapters.dataset import (
     MapScalar,
 )

View file

@@ -1,4 +1,3 @@
 import pytest
 from nwb_linkml.adapters import SchemaAdapter

View file

@@ -1,4 +1,3 @@
 import time
 import h5py

View file

@@ -18,5 +18,6 @@ try:
     DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype]
 except (NameError, RecursionError):
     warnings.warn(
-        "Error importing pydantic classes, passing because we might be in the process of patching them, but it is likely they are broken and you will be unable to use them!"
+        "Error importing pydantic classes, passing because we might be in the process of patching"
+        " them, but it is likely they are broken and you will be unable to use them!"
     )

View file

@@ -510,7 +510,9 @@ class ReftypeOptions(EnumDefinitionImpl):
     )
     region = PermissibleValue(
         text="region",
-        description="Reference to a region (i.e. subset) of another dataset of the given target_type",
+        description=(
+            "Reference to a region (i.e. subset) of another dataset of the given target_type"
+        ),
     )
     _defn = EnumDefinition(

View file

@@ -88,5 +88,6 @@ warn_unreachable = true
 [tool.black]
 target-version = ['py38', 'py39', 'py310', 'py311']
 enable-unstable-feature = ["string_processing"]
+preview = true
 include = "nwb_linkml/.*\\.py$|nwb_schema_language/.*\\.py$"
 line-length = 100
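
With both lines in place, an ordinary black run from the repository root picks the settings up from pyproject.toml and applies the long-string splitting shown above to every file matched by the include pattern. Note that enable-unstable-feature is only honoured when preview mode is on, and only by black releases recent enough to know the option, which is presumably why preview = true is the single line this hunk adds.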