assorted cleanups, but I am tired

This commit is contained in:
sneakers-the-rat 2024-07-19 21:28:24 -07:00
parent 43187bfa3e
commit 974058ce5f
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
9 changed files with 67 additions and 106 deletions

View file

@ -405,7 +405,7 @@ files = [
[[package]]
name = "dask"
version = "2024.7.0"
version = "2024.7.1"
requires_python = ">=3.9"
summary = "Parallel PyData with Task Scheduling"
groups = ["default", "dev", "tests"]
@ -420,8 +420,8 @@ dependencies = [
"toolz>=0.10.0",
]
files = [
{file = "dask-2024.7.0-py3-none-any.whl", hash = "sha256:0f30f218a1fe1c8e9a6ba8add1207088ba9ff049098d4ea4ce045fd5ff7ca914"},
{file = "dask-2024.7.0.tar.gz", hash = "sha256:0060bae9a58b5b3ce7e0d97040e903b4d3db09ba49222101cfc40f9834a8a6bc"},
{file = "dask-2024.7.1-py3-none-any.whl", hash = "sha256:dd046840050376c317de90629db5c6197adda820176cf3e2df10c3219d11951f"},
{file = "dask-2024.7.1.tar.gz", hash = "sha256:dbaef2d50efee841a9d981a218cfeb50392fc9a95e0403b6d680450e4f50d531"},
]
[[package]]
@ -1234,7 +1234,7 @@ files = [
[[package]]
name = "prefixmaps"
version = "0.2.4"
version = "0.2.5"
requires_python = "<4.0,>=3.8"
summary = "A python library for retrieving semantic prefix maps"
groups = ["default", "dev", "tests"]
@ -1243,8 +1243,8 @@ dependencies = [
"pyyaml>=5.3.1",
]
files = [
{file = "prefixmaps-0.2.4-py3-none-any.whl", hash = "sha256:89bf0e6fb08c276f754f9624c42adf2e87c64ee92a3dde1f7eff01f22d85b512"},
{file = "prefixmaps-0.2.4.tar.gz", hash = "sha256:ae86a1b31189d0516d199756d5808f75f44b39e86546c356cc78c0fe8d2078af"},
{file = "prefixmaps-0.2.5-py3-none-any.whl", hash = "sha256:68caa04b3a6a8e058aa1c55affe32c62e44b564d031d63f768e267b796a1f3ee"},
{file = "prefixmaps-0.2.5.tar.gz", hash = "sha256:aaccd2425ade2ea97a502c58be49fe8f3536e3d5e919712ae0358a39fc800799"},
]
[[package]]
@ -1861,40 +1861,40 @@ files = [
[[package]]
name = "ruff"
version = "0.5.2"
version = "0.5.3"
requires_python = ">=3.7"
summary = "An extremely fast Python linter and code formatter, written in Rust."
groups = ["dev"]
files = [
{file = "ruff-0.5.2-py3-none-linux_armv6l.whl", hash = "sha256:7bab8345df60f9368d5f4594bfb8b71157496b44c30ff035d1d01972e764d3be"},
{file = "ruff-0.5.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1aa7acad382ada0189dbe76095cf0a36cd0036779607c397ffdea16517f535b1"},
{file = "ruff-0.5.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:aec618d5a0cdba5592c60c2dee7d9c865180627f1a4a691257dea14ac1aa264d"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b62adc5ce81780ff04077e88bac0986363e4a3260ad3ef11ae9c14aa0e67ef"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc42ebf56ede83cb080a50eba35a06e636775649a1ffd03dc986533f878702a3"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15c6e9f88c67ffa442681365d11df38afb11059fc44238e71a9d9f1fd51de70"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d3de9a5960f72c335ef00763d861fc5005ef0644cb260ba1b5a115a102157251"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe5a968ae933e8f7627a7b2fc8893336ac2be0eb0aace762d3421f6e8f7b7f83"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04f54a9018f75615ae52f36ea1c5515e356e5d5e214b22609ddb546baef7132"},
{file = "ruff-0.5.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed02fb52e3741f0738db5f93e10ae0fb5c71eb33a4f2ba87c9a2fa97462a649"},
{file = "ruff-0.5.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3cf8fe659f6362530435d97d738eb413e9f090e7e993f88711b0377fbdc99f60"},
{file = "ruff-0.5.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:237a37e673e9f3cbfff0d2243e797c4862a44c93d2f52a52021c1a1b0899f846"},
{file = "ruff-0.5.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2a2949ce7c1cbd8317432ada80fe32156df825b2fd611688814c8557824ef060"},
{file = "ruff-0.5.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:481af57c8e99da92ad168924fd82220266043c8255942a1cb87958b108ac9335"},
{file = "ruff-0.5.2-py3-none-win32.whl", hash = "sha256:f1aea290c56d913e363066d83d3fc26848814a1fed3d72144ff9c930e8c7c718"},
{file = "ruff-0.5.2-py3-none-win_amd64.whl", hash = "sha256:8532660b72b5d94d2a0a7a27ae7b9b40053662d00357bb2a6864dd7e38819084"},
{file = "ruff-0.5.2-py3-none-win_arm64.whl", hash = "sha256:73439805c5cb68f364d826a5c5c4b6c798ded6b7ebaa4011f01ce6c94e4d5583"},
{file = "ruff-0.5.2.tar.gz", hash = "sha256:2c0df2d2de685433794a14d8d2e240df619b748fbe3367346baa519d8e6f1ca2"},
{file = "ruff-0.5.3-py3-none-linux_armv6l.whl", hash = "sha256:b12424d9db7347fa63c5ed9af010003338c63c629fb9c9c6adb2aa4f5699729b"},
{file = "ruff-0.5.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8d72c5684bbd4ed304a9a955ee2e67f57b35f6193222ade910cca8a805490e3"},
{file = "ruff-0.5.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d2fc2cdb85ccac1e816cc9d5d8cedefd93661bd957756d902543af32a6b04a71"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4bc751240b2fab5d19254571bcacb315c7b0b00bf3c912d52226a82bbec073"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc697ec874fdd7c7ba0a85ec76ab38f8595224868d67f097c5ffc21136e72fcd"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e791d34d3557a3819b3704bc1f087293c821083fa206812842fa363f6018a192"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:76bb5a87fd397520b91a83eae8a2f7985236d42dd9459f09eef58e7f5c1d8316"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8cfc7a26422c78e94f1ec78ec02501bbad2df5834907e75afe474cc6b83a8c1"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96066c4328a49fce2dd40e80f7117987369feec30ab771516cf95f1cc2db923c"},
{file = "ruff-0.5.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfe9ab5bdc0b08470c3b261643ad54ea86edc32b64d1e080892d7953add3ad"},
{file = "ruff-0.5.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7704582a026fa02cca83efd76671a98ee6eb412c4230209efe5e2a006c06db62"},
{file = "ruff-0.5.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:08058d077e21b856d32ebf483443390e29dc44d927608dc8f092ff6776519da9"},
{file = "ruff-0.5.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77d49484429ed7c7e6e2e75a753f153b7b58f875bdb4158ad85af166a1ec1822"},
{file = "ruff-0.5.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:642cbff6cbfa38d2566d8db086508d6f472edb136cbfcc4ea65997745368c29e"},
{file = "ruff-0.5.3-py3-none-win32.whl", hash = "sha256:eafc45dd8bdc37a00b28e68cc038daf3ca8c233d73fea276dcd09defb1352841"},
{file = "ruff-0.5.3-py3-none-win_amd64.whl", hash = "sha256:cbaec2ddf4f78e5e9ecf5456ea0f496991358a1d883862ed0b9e947e2b6aea93"},
{file = "ruff-0.5.3-py3-none-win_arm64.whl", hash = "sha256:05fbd2cb404775d6cd7f2ff49504e2d20e13ef95fa203bd1ab22413af70d420b"},
{file = "ruff-0.5.3.tar.gz", hash = "sha256:2a3eb4f1841771fa5b67a56be9c2d16fd3cc88e378bd86aaeaec2f7e6bcdd0a2"},
]
[[package]]
name = "setuptools"
version = "71.0.1"
version = "71.0.4"
requires_python = ">=3.8"
summary = "Easily download, build, install, upgrade, and uninstall Python packages"
groups = ["dev", "tests"]
files = [
{file = "setuptools-71.0.1-py3-none-any.whl", hash = "sha256:1eb8ef012efae7f6acbc53ec0abde4bc6746c43087fd215ee09e1df48998711f"},
{file = "setuptools-71.0.1.tar.gz", hash = "sha256:c51d7fd29843aa18dad362d4b4ecd917022131425438251f4e3d766c964dd1ad"},
{file = "setuptools-71.0.4-py3-none-any.whl", hash = "sha256:ed2feca703be3bdbd94e6bb17365d91c6935c6b2a8d0bb09b66a2c435ba0b1a5"},
{file = "setuptools-71.0.4.tar.gz", hash = "sha256:48297e5d393a62b7cb2a10b8f76c63a73af933bd809c9e0d0d6352a1a0135dd8"},
]
[[package]]

View file

@ -38,7 +38,7 @@ class ClassAdapter(Adapter):
Cast from YAML string to desired class
"""
if isinstance(value, str):
from nwb_linkml.io.schema import load_yaml
from nwb_linkml.io.yaml import load_yaml
value = load_yaml(value)
value = cls.TYPE(**value)

View file

@ -83,6 +83,10 @@ class NWBPydanticGenerator(PydanticGenerator):
default_factory=lambda: [ArrayRepresentation.NUMPYDANTIC]
)
black: bool = True
inlined: bool = True
emit_metadata: bool = True
gen_classvars: bool = True
gen_slots: bool = True
def _check_anyof(
self, s: SlotDefinition, sn: SlotDefinitionName, sv: SchemaView

View file

@ -6,38 +6,16 @@ from pathlib import Path
from pprint import pprint
from typing import Optional
import yaml
from linkml_runtime.loaders import yaml_loader
from nwb_linkml.adapters.namespaces import NamespacesAdapter
from nwb_linkml.adapters.schema import SchemaAdapter
from nwb_linkml.io.yaml import load_yaml
from nwb_linkml.maps.postload import apply_postload
from nwb_linkml.providers.git import HDMF_COMMON_REPO, NWB_CORE_REPO, NamespaceRepo
from nwb_schema_language import Dataset, Group, Namespaces
def load_yaml(path: Path | str) -> dict:
    """
    Load yaml file from file, applying postload modifications
    """
    # Decide whether ``path`` names a real file or is itself a YAML string.
    # Building/stat-ing a Path from a long string raises OSError, which is
    # exactly the "this is YAML source, not a filename" case.
    try:
        is_file = Path(path).exists()
    except OSError:
        # long strings can't be made into paths!
        is_file = False

    if is_file:
        with open(path) as file:
            ns_dict = yaml.safe_load(file)
    else:
        ns_dict = yaml.safe_load(path)

    return apply_postload(ns_dict)
def load_namespaces(path: Path | NamespaceRepo) -> Namespaces:
"""Loads the NWB SCHEMA LANGUAGE namespaces (not the namespacesadapter)"""
if isinstance(path, NamespaceRepo):

View file

@ -7,6 +7,10 @@ import re
from pathlib import Path
from typing import Literal, List, Union, overload
import yaml
from nwb_linkml.maps.postload import apply_postload
@overload
def yaml_peek(key: str, path: Union[str, Path], root:bool = True, first:Literal[True]=True) -> str: ...
@ -61,3 +65,24 @@ def yaml_peek(key: str, path: Union[str, Path], root:bool = True, first:bool=Tru
raise KeyError(f'Key {key} not found in {path}')
def load_yaml(path: Path | str) -> dict:
    """
    Load yaml from a file or from a yaml source string, applying postload
    modifications.

    Args:
        path (Path | str): Either a path to a yaml file, or a string of
            yaml source itself.

    Returns:
        dict: The parsed yaml with postload patches applied.
    """
    is_file = False
    try:
        a_path = Path(path)
        if a_path.exists():
            is_file = True
    except OSError:
        # long strings can't be made into paths!
        pass
    if not is_file:
        ns_dict = yaml.safe_load(path)
    else:
        # YAML is UTF-8 by spec — don't depend on the locale's default encoding
        with open(path, encoding="utf-8") as file:
            ns_dict = yaml.safe_load(file)
    ns_dict = apply_postload(ns_dict)
    return ns_dict

View file

@ -12,7 +12,6 @@ from typing import List, Optional, Type
from pydantic import BaseModel
from nwb_linkml import io
from nwb_linkml.io.yaml import yaml_peek
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.maps.naming import module_case, version_module_case
@ -69,9 +68,6 @@ class PydanticProvider(Provider):
NWB -> LinkML schema to load from.
out_file (Optional[Path]): Optionally override the output file. If ``None``,
generate from namespace and version
version (Optional[str]): The version of the schema to build, if present.
Works similarly to ``version`` in :class:`.LinkMLProvider`.
Ignored if ``namespace`` is a Path.
split (bool): If ``False`` (default), generate a single ``namespace.py`` file,
otherwise generate a python file for each schema in the namespace
in addition to a ``namespace.py`` that imports from them
@ -111,23 +107,10 @@ class PydanticProvider(Provider):
fn = module_case(fn) + ".py"
out_file = self.path / name / version / fn
default_kwargs = {
"split": split,
"emit_metadata": True,
"gen_slots": True,
"pydantic_version": "2",
}
default_kwargs.update(kwargs)
# if we weren't given explicit versions to load, figure them out from the namespace
if versions is None:
versions = self._get_dependent_versions(path)
if split:
result = self._build_split(path, versions, default_kwargs, dump, out_file, force)
result = self._build_split(path, dump, out_file, force, **kwargs)
else:
result = self._build_unsplit(path, versions, default_kwargs, dump, out_file, force)
result = self._build_unsplit(path, dump, out_file, force, **kwargs)
self.install_pathfinder()
return result
@ -135,18 +118,17 @@ class PydanticProvider(Provider):
def _build_unsplit(
self,
path: Path,
versions: dict,
default_kwargs: dict,
dump: bool,
out_file: Path,
force: bool,
**kwargs
) -> Optional[str]:
if out_file.exists() and not force:
with open(out_file) as ofile:
serialized = ofile.read()
return serialized
generator = NWBPydanticGenerator(str(path), versions=versions, **default_kwargs)
generator = NWBPydanticGenerator(str(path), **kwargs)
serialized = generator.serialize()
if dump:
out_file.parent.mkdir(parents=True, exist_ok=True)
@ -160,17 +142,16 @@ class PydanticProvider(Provider):
def _build_split(
self,
path: Path,
versions: dict,
default_kwargs: dict,
dump: bool,
out_file: Path,
force: bool,
**kwargs
) -> List[str]:
serialized = []
for schema_file in path.parent.glob("*.yaml"):
this_out = out_file.parent / (module_case(schema_file.stem) + ".py")
serialized.append(
self._build_unsplit(schema_file, versions, default_kwargs, dump, this_out, force)
self._build_unsplit(schema_file, dump, this_out, force, **kwargs)
)
# If there are dependent versions that also need to be built, do that now!
@ -204,28 +185,6 @@ class PydanticProvider(Provider):
with open(ifile, "w") as ifile_open:
ifile_open.write(" ")
def _get_dependent_versions(self, path: Path) -> dict[str, str]:
    """
    For a given namespace schema file, get the versions of any other schemas it imports

    Namespace imports will be in the importing schema like:

        imports:
        -../../hdmf_common/v1_8_0/namespace
        -../../hdmf_experimental/v0_5_0/namespace

    Returns:
        dict[str,str]: A dictionary mapping a namespace to a version number
    """
    schema = io.schema.load_yaml(path)
    versions: dict[str, str] = {}
    # Only relative ("../..") imports point at other namespace packages;
    # bare imports are schemas within this namespace and carry no version.
    relative_imports = (i for i in schema["imports"] if i.startswith(".."))
    for rel in relative_imports:
        resolved = (Path(path).parent / Path(rel + ".yaml")).resolve()
        imported = io.schema.load_yaml(resolved)
        versions[imported["name"]] = imported["version"]
    return versions
@classmethod
def module_name(self, namespace: str, version: str) -> str:
"""Module name for the built module

View file

@ -61,10 +61,6 @@ def test_generate_pydantic(tmp_output_dir):
generator = NWBPydanticGenerator(
str(schema_path),
pydantic_version="2",
emit_metadata=True,
gen_classvars=True,
gen_slots=True,
schema_map=preloaded_schema,
)
gen_pydantic = generator.serialize()

View file

@ -5,7 +5,7 @@ from pathlib import Path
import yaml
from yaml import CDumper as Dumper
from nwb_linkml.io.schema import load_yaml
from nwb_linkml.io.yaml import load_yaml
def test_preload_maps():

View file

@ -64,8 +64,7 @@ def test_linkml_build_from_yaml(tmp_output_dir):
res = provider.build_from_yaml(ns_file)
@pytest.mark.skip()
@pytest.mark.depends(on=["test_linkml_provider"])
# @pytest.mark.depends(on=["test_linkml_provider"])
@pytest.mark.parametrize(
["class_name", "test_fields"],
[