remove outdated generator tests, unskip module

This commit is contained in:
sneakers-the-rat 2024-09-30 22:51:56 -07:00
parent ae37db3a41
commit dfeac9e808
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
3 changed files with 52 additions and 210 deletions

View file

@ -7,8 +7,6 @@ See class and module docstrings for details :)
import re
from dataclasses import dataclass, field
from pathlib import Path
from types import ModuleType
from typing import Callable, ClassVar, Dict, List, Optional, Tuple
from linkml.generators import PydanticGenerator
@ -146,7 +144,6 @@ class NWBPydanticGenerator(PydanticGenerator):
cls = AfterGenerateClass.inject_dynamictable(cls)
cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv)
cls = AfterGenerateClass.inject_dynamictable_imports(cls, sv, self._get_element_import)
cls = AfterGenerateClass.strip_vector_data_slots(cls, sv)
return cls
def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule:
@ -157,25 +154,6 @@ class NWBPydanticGenerator(PydanticGenerator):
del template.meta["source_file"]
return template
def compile_module(
    self, module_path: Optional[Path] = None, module_name: str = "test", **kwargs
) -> ModuleType:  # pragma: no cover - replaced with provider
    """
    Compile the generated pydantic source code into an importable module.

    Args:
        module_path: Optional directory to treat as the package root; when given,
            an ``__init__.py`` marker file is written there so the compiled code
            is importable as a package.
        module_name: Name to compile the module under.
        **kwargs: Forwarded to :meth:`serialize`.

    Returns:
        ModuleType: The compiled python module.
    """
    pycode = self.serialize(**kwargs)
    if module_path is not None:
        module_path = Path(module_path)
        # package marker so relative imports inside the generated code resolve
        init_file = module_path / "__init__.py"
        with open(init_file, "w") as ifile:
            ifile.write(" ")
    # NOTE: previously wrapped in ``except NameError as e: raise e`` — a no-op
    # re-raise that only added noise, so it has been removed.
    return compile_python(pycode, module_path, module_name)
class AfterGenerateSlot:
"""
@ -373,15 +351,6 @@ class AfterGenerateClass:
cls.imports += imp
return cls
@staticmethod
def strip_vector_data_slots(cls: ClassResult, sv: SchemaView) -> ClassResult:
    """
    Remove spurious ``vector_data`` slots from DynamicTables
    """
    # pop with a default is a no-op when the slot is absent,
    # so no membership check is needed
    cls.cls.attributes.pop("vector_data", None)
    return cls
def wrap_preserving_optional(annotation: str, wrap: str) -> str:
"""

View file

@ -5,8 +5,10 @@ from typing import Dict, Optional
import pytest
from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import (
from linkml_runtime.linkml_model.meta import (
ArrayExpression,
ClassDefinition,
DimensionExpression,
Prefix,
SchemaDefinition,
SlotDefinition,
@ -88,10 +90,6 @@ def linkml_schema_bare() -> TestSchemas:
equals_string="toplevel",
identifier=True,
),
SlotDefinition(name="array", range="MainTopLevel__Array"),
SlotDefinition(
name="SkippableSlot", description="A slot that was meant to be skipped!"
),
SlotDefinition(
name="inline_dict",
description=(
@ -103,35 +101,42 @@ def linkml_schema_bare() -> TestSchemas:
inlined_as_list=False,
any_of=[{"range": "OtherClass"}, {"range": "StillAnotherClass"}],
),
],
),
ClassDefinition(
name="MainTopLevel__Array",
description="Main class's array",
is_a="Arraylike",
attributes=[
SlotDefinition(name="x", range="numeric", required=True),
SlotDefinition(name="y", range="numeric", required=True),
SlotDefinition(
name="z",
name="value",
description="Main class's array",
range="numeric",
required=False,
maximum_cardinality=3,
minimum_cardinality=3,
),
SlotDefinition(
name="a",
range="numeric",
required=False,
minimum_cardinality=4,
maximum_cardinality=4,
any_of=[
{
"array": ArrayExpression(
dimensions=[
DimensionExpression(alias="x"),
DimensionExpression(alias="y"),
]
)
},
{
"array": ArrayExpression(
dimensions=[
DimensionExpression(alias="x"),
DimensionExpression(alias="y"),
DimensionExpression(alias="z", exact_cardinality=3),
]
)
},
{
"array": ArrayExpression(
dimensions=[
DimensionExpression(alias="x"),
DimensionExpression(alias="y"),
DimensionExpression(alias="z", exact_cardinality=3),
DimensionExpression(alias="a", exact_cardinality=4),
]
)
},
],
),
],
),
ClassDefinition(
name="skippable",
description="A class that lives to be skipped!",
),
ClassDefinition(
name="OtherClass",
description="Another class yno!",

View file

@ -7,7 +7,6 @@ because it's tested in the base linkml package.
# ruff: noqa: F821 - until the tests here settle down
import re
import sys
import typing
from types import ModuleType
@ -16,8 +15,9 @@ from typing import Optional, TypedDict
import numpy as np
import pytest
from numpydantic.ndarray import NDArrayMeta
from pydantic import BaseModel
from numpydantic.dtype import Float
from linkml_runtime.utils.compile_python import compile_python
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from ..fixtures import (
@ -35,7 +35,6 @@ class TestModules(TypedDict):
TestModules.__test__ = False
@pytest.mark.xfail()
def generate_and_import(
linkml_schema: TestSchemas, split: bool, generator_kwargs: Optional[dict] = None
) -> TestModules:
@ -45,7 +44,6 @@ def generate_and_import(
"split": split,
"emit_metadata": True,
"gen_slots": True,
"pydantic_version": "2",
**generator_kwargs,
}
@ -68,20 +66,13 @@ def generate_and_import(
sys.path.append(str(linkml_schema.core_path.parents[1]))
core = compile_python(
str(linkml_schema.core_path.with_suffix(".py")), module_name="test_schema.core"
)
imported = compile_python(
str(linkml_schema.imported_path.with_suffix(".py")), module_name="test_schema.imported"
)
namespace = compile_python(
str(linkml_schema.namespace_path.with_suffix(".py")), module_name="test_schema.namespace"
)
core = compile_python(str(linkml_schema.core_path.with_suffix(".py")))
imported = compile_python(str(linkml_schema.imported_path.with_suffix(".py")))
namespace = compile_python(str(linkml_schema.namespace_path.with_suffix(".py")))
return TestModules(core=core, imported=imported, namespace=namespace, split=split)
@pytest.mark.xfail()
@pytest.fixture(scope="module", params=["split", "unsplit"])
def imported_schema(linkml_schema, request) -> TestModules:
"""
@ -90,97 +81,10 @@ def imported_schema(linkml_schema, request) -> TestModules:
"""
split = request.param == "split"
yield generate_and_import(linkml_schema, split)
del sys.modules["test_schema.core"]
del sys.modules["test_schema.imported"]
del sys.modules["test_schema.namespace"]
return generate_and_import(linkml_schema, split)
def _model_correctness(modules: TestModules):
    """
    Shared assertions for model correctness.

    Only tests very basic things like type and existence,
    more specific tests are in their own test functions!
    """
    # every generated class in the core module must be a pydantic model
    for clsname in ("MainTopLevel", "Skippable", "OtherClass", "StillAnotherClass"):
        assert issubclass(getattr(modules["core"], clsname), BaseModel)
    assert issubclass(modules["imported"].MainThing, BaseModel)
@pytest.mark.xfail()
def test_generate(linkml_schema):
    """
    Base case, we can generate pydantic models from linkml schema

    Tests basic functionality of serializer including
    - serialization
    - compilation (loading as a python model)
    - existence and correctness of attributes
    """
    modules = generate_and_import(linkml_schema, split=False)

    # each compiled artifact must be a real python module object
    assert isinstance(modules["core"], ModuleType)
    assert isinstance(modules["imported"], ModuleType)
    assert isinstance(modules["namespace"], ModuleType)
    _model_correctness(modules)

    # unsplit modules should have all the classes present, even if they aren't defined in it
    assert modules["core"].MainThing.__module__ == "test_schema.core"
    assert issubclass(modules["core"].MainTopLevel, modules["core"].MainThing)

    # drop the compiled modules from sys.modules so later generations
    # in this test session don't collide with these names
    del sys.modules["test_schema.core"]
    del sys.modules["test_schema.imported"]
    del sys.modules["test_schema.namespace"]
@pytest.mark.xfail()
def test_generate_split(linkml_schema):
    """
    We can generate schema split into separate files
    """
    modules = generate_and_import(linkml_schema, split=True)

    # each compiled artifact must be a real python module object
    assert isinstance(modules["core"], ModuleType)
    assert isinstance(modules["imported"], ModuleType)
    assert isinstance(modules["namespace"], ModuleType)
    _model_correctness(modules)

    # split modules have classes defined once and imported
    assert modules["core"].MainThing.__module__ == "test_schema.imported"
    # can't assert subclass here because of the weird way relative imports work
    # when we don't actually import using normal python import machinery
    assert modules["core"].MainTopLevel.__mro__[1].__module__ == "test_schema.imported"

    # drop the compiled modules from sys.modules so later generations
    # in this test session don't collide with these names
    del sys.modules["test_schema.core"]
    del sys.modules["test_schema.imported"]
    del sys.modules["test_schema.namespace"]
@pytest.mark.xfail()
def test_versions(linkml_schema):
    """
    We can use explicit versions that import from relative paths generated by
    SchemaProvider
    """
    # here all we do is check that we have the correct relative import, since we test
    # the actual generation of these path structures elsewhere in the provider tests
    generator = NWBPydanticGenerator(
        str(linkml_schema.core_path), versions={"imported": "v4.2.0"}, split=True
    )
    serialized = generator.serialize()

    # the import should be like
    # from ...imported.v4_2_0.imported import (
    #     MainThing
    # )
    matches = re.findall(
        r"from \.\.\.imported\.v4_2_0.*?MainThing.*?\)", serialized, flags=re.DOTALL
    )
    assert len(matches) == 1
@pytest.mark.xfail()
def test_arraylike(imported_schema):
def test_array(imported_schema):
"""
Arraylike classes are converted to slots that specify nptyping arrays
@ -191,19 +95,18 @@ def test_arraylike(imported_schema):
]] = Field(None)
"""
# check that we have gotten an NDArray annotation and its shape is correct
array = imported_schema["core"].MainTopLevel.model_fields["array"].annotation
array = imported_schema["core"].MainTopLevel.model_fields["value"].annotation
args = typing.get_args(array)
for i, _ in enumerate(("* x, * y", "* x, * y, 3 z", "* x, * y, 3 z, 4 a")):
for i, shape in enumerate(("* x, * y", "* x, * y, 3 z", "* x, * y, 3 z, 4 a")):
assert isinstance(args[i], NDArrayMeta)
assert args[i].__args__[0].__args__
assert args[i].__args__[1] == np.number
assert args[i].__args__[0].__args__[0] == shape
assert args[i].__args__[1] == Float
# we shouldn't have an actual class for the array
assert not hasattr(imported_schema["core"], "MainTopLevel__Array")
assert not hasattr(imported_schema["core"], "MainTopLevelArray")
@pytest.mark.xfail()
def test_inject_fields(imported_schema):
"""
Our root model should have the special fields we injected
@ -213,35 +116,6 @@ def test_inject_fields(imported_schema):
assert "object_id" in base.model_fields
@pytest.mark.xfail()
def test_linkml_meta(imported_schema):
    """
    We should be able to store some linkml metadata with our classes
    """
    core = imported_schema["core"]
    meta = core.LinkML_Meta
    assert "tree_root" in meta.model_fields
    # only the top-level class is flagged as a tree root
    assert core.MainTopLevel.linkml_meta.default.tree_root
    assert not core.OtherClass.linkml_meta.default.tree_root
@pytest.mark.xfail()
def test_skip(linkml_schema):
    """
    We can skip slots and classes
    """
    skip_kwargs = {
        "SKIP_SLOTS": ("SkippableSlot",),
        "SKIP_CLASSES": ("Skippable", "skippable"),
    }
    modules = generate_and_import(linkml_schema, split=False, generator_kwargs=skip_kwargs)

    # the skipped class should not be generated at all...
    assert not hasattr(modules["core"], "Skippable")
    # ...and the skipped slot should be absent from the generated model
    assert "SkippableSlot" not in modules["core"].MainTopLevel.model_fields
@pytest.mark.xfail()
def test_inline_with_identifier(imported_schema):
"""
By default, if a class has an identifier attribute, it is inlined
@ -256,7 +130,6 @@ def test_inline_with_identifier(imported_schema):
assert stillanother is imported_schema["core"].StillAnotherClass
@pytest.mark.xfail()
def test_namespace(imported_schema):
"""
Namespace schema import all classes from the other schema
@ -269,23 +142,18 @@ def test_namespace(imported_schema):
("MainThing", "test_schema.imported"),
("Arraylike", "test_schema.imported"),
("MainTopLevel", "test_schema.core"),
("Skippable", "test_schema.core"),
("OtherClass", "test_schema.core"),
("StillAnotherClass", "test_schema.core"),
):
assert hasattr(ns, classname)
if imported_schema["split"]:
assert getattr(ns, classname).__module__ == modname
module_end_name = ".".join(getattr(ns, classname).__module__.split(".")[-2:])
assert module_end_name == modname
@pytest.mark.xfail()
def test_get_set_item(imported_schema):
    """We can get and set without explicitly addressing array"""
    instance = imported_schema["core"].MainTopLevel(array=np.array([[1, 2, 3], [4, 5, 6]]))
    # indexing the model delegates to the underlying array slot
    instance[0] = 50
    assert (instance[0] == 50).all()
    assert (instance.array[0] == 50).all()
def test_get_item(imported_schema):
    """We can get without explicitly addressing array"""
    cls = imported_schema["core"].MainTopLevel(value=np.array([[1, 2, 3], [4, 5, 6]], dtype=float))
    # indexing the model delegates to the underlying array slot
    assert np.array_equal(cls[0], np.array([1, 2, 3], dtype=float))
    cls[1, 1] = 100
    assert cls[1, 1] == 100
    # the array lives in the ``value`` slot (the ``array`` slot was removed in
    # the schema rename) — asserting on ``cls.array`` would raise AttributeError
    assert cls.value[1, 1] == 100