make the tests pass again - add a validator that does the opposite of coerce_value: try to pass the input as the `value` field of the model. fix test assumptions and model creation.

model update to follow
sneakers-the-rat 2024-09-19 22:43:29 -07:00
parent 734088f18e
commit 8993014832
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
7 changed files with 37 additions and 13 deletions
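
In plain terms, the new validator is the mirror image of coerce_value: where coerce_value unwraps a dict-like input and retries validation with its "value" member, cast_with_value wraps a bare input into {"value": v} and retries. A minimal standalone sketch of the idea, using a hypothetical model rather than one of the generated NWB classes:

from typing import Any, List

from pydantic import BaseModel, field_validator

class ScalarData(BaseModel):
    # hypothetical stand-in for a generated dataset model that keeps its payload
    # in a single `value` field
    value: List[float]

class Series(BaseModel):
    data: ScalarData

    @field_validator("*", mode="wrap")
    @classmethod
    def cast_with_value(cls, v: Any, handler, info) -> Any:
        """If plain validation fails, retry with the input wrapped as the value field"""
        try:
            return handler(v)
        except Exception as e1:
            try:
                return handler({"value": v})
            except Exception:
                raise e1

# Series(data={"value": [1.0, 2.0]}) validates directly;
# Series(data=[1.0, 2.0]) now also works, rescued by cast_with_value.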

@@ -59,9 +59,7 @@ class MapScalar(DatasetMap):
     slots:
       - name: MyScalar
         description: A scalar
-        multivalued: false
         range: int32
-        required: false
     """

@@ -178,7 +178,7 @@ class NamespacesAdapter(Adapter):
         nwb-schema-language inheritance doesn't work like normal python inheritance -
         instead of inheriting everything at the 'top level' of a class, it also
         recursively merges all properties from the parent objects.

         While this operation does not take care to modify classes in a way that respects their order
         (i.e. roll down ancestor classes first, in order, before the leaf classes),
         it doesn't matter - this method should be both idempotent and order insensitive
@@ -196,8 +196,8 @@ class NamespacesAdapter(Adapter):
         # merge and cast
         new_cls: dict = {}
         for i, parent in enumerate(parents):
             # we want a full roll-down of all the ancestor classes,
             # but we make an abbreviated leaf class
             complete = False if i == len(parents) - 1 else True
             new_cls = roll_down_nwb_class(new_cls, parent, complete=complete)
         new_cls: Group | Dataset = type(cls)(**new_cls)
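
The roll-down the docstring describes amounts to a recursive dict merge in which values set on the child win and everything else is filled in from the ancestors. A rough sketch of that merge semantics, assuming plain dicts rather than the actual nwb-schema-language classes (this is not the real roll_down_nwb_class implementation):

from typing import Any, Dict

def roll_down(parent: Dict[str, Any], child: Dict[str, Any]) -> Dict[str, Any]:
    """Recursively merge parent properties into the child; child values win on conflict."""
    merged = dict(parent)
    for key, child_val in child.items():
        if isinstance(merged.get(key), dict) and isinstance(child_val, dict):
            merged[key] = roll_down(merged[key], child_val)
        else:
            merged[key] = child_val
    return merged

parent = {"dtype": "int32", "dims": ["x"], "attributes": {"unit": {"value": "volts"}}}
child = {"dims": ["x", "y"], "attributes": {"unit": {"value": "mV"}}}
rolled = roll_down(parent, child)
# child overrides win, unset parent values are preserved
assert rolled == {"dtype": "int32", "dims": ["x", "y"], "attributes": {"unit": {"value": "mV"}}}
# idempotent: rolling the same parent down again changes nothing
assert roll_down(parent, rolled) == rolled

Note the deliberate exception visible in the updated tests further down: top-level attributes the child doesn't override are left to python/linkml inheritance rather than merged, while attributes nested inside child datasets are rolled down.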

@@ -26,6 +26,7 @@ from linkml_runtime.utils.formatutils import remove_empty_items
 from linkml_runtime.utils.schemaview import SchemaView
 from nwb_linkml.includes.base import (
+    BASEMODEL_CAST_WITH_VALUE,
     BASEMODEL_COERCE_CHILD,
     BASEMODEL_COERCE_VALUE,
     BASEMODEL_GETITEM,
@@ -55,6 +56,7 @@ class NWBPydanticGenerator(PydanticGenerator):
        'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
        BASEMODEL_GETITEM,
        BASEMODEL_COERCE_VALUE,
+       BASEMODEL_CAST_WITH_VALUE,
        BASEMODEL_COERCE_CHILD,
     )
     split: bool = True

@@ -16,7 +16,7 @@ BASEMODEL_GETITEM = """
 BASEMODEL_COERCE_VALUE = """
     @field_validator("*", mode="wrap")
     @classmethod
-    def coerce_value(cls, v: Any, handler) -> Any:
+    def coerce_value(cls, v: Any, handler, info) -> Any:
         \"\"\"Try to rescue instantiation by using the value field\"\"\"
         try:
             return handler(v)
@@ -27,7 +27,29 @@ BASEMODEL_COERCE_VALUE = """
             try:
                 return handler(v["value"])
             except (IndexError, KeyError, TypeError):
-                raise e1
+                raise ValueError(
+                    f"coerce_value: Could not use the value field of {type(v)} "
+                    f"to construct {cls.__name__}.{info.field_name}, "
+                    f"expected type: {cls.model_fields[info.field_name].annotation}"
+                ) from e1
+"""
+
+BASEMODEL_CAST_WITH_VALUE = """
+    @field_validator("*", mode="wrap")
+    @classmethod
+    def cast_with_value(cls, v: Any, handler, info) -> Any:
+        \"\"\"Try to rescue instantiation by casting into the model's value field\"\"\"
+        try:
+            return handler(v)
+        except Exception as e1:
+            try:
+                return handler({"value": v})
+            except Exception:
+                raise ValueError(
+                    f"cast_with_value: Could not cast {type(v)} as value field for "
+                    f"{cls.__name__}.{info.field_name},"
+                    f" expected_type: {cls.model_fields[info.field_name].annotation}"
+                ) from e1
 """
 
 BASEMODEL_COERCE_CHILD = """
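
The richer error messages lean on the extra `info` argument that pydantic passes to wrap validators; a small self-contained illustration of what it exposes (the model here is arbitrary, not a generated class):

from typing import Any, Optional

from pydantic import BaseModel, field_validator

class Probe(BaseModel):
    value: Optional[int] = None

    @field_validator("*", mode="wrap")
    @classmethod
    def show_context(cls, v: Any, handler, info) -> Any:
        # info.field_name names the field currently being validated, and
        # cls.model_fields[...].annotation is its declared type - the two pieces
        # used to build the coerce_value / cast_with_value error messages
        print(info.field_name, cls.model_fields[info.field_name].annotation)
        return handler(v)

Probe(value=1)  # prints something like: value typing.Optional[int]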

@@ -54,7 +54,7 @@ def test_walk_field_values(nwb_core_fixture):
     text_models = list(nwb_core_fixture.walk_field_values(nwb_core_fixture, "dtype", value="text"))
     assert all([d.dtype == "text" for d in text_models])
-    # 135 known value from regex search
-    assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 135
+    # 155 known value from regex search
+    assert len(text_models) == len([d for d in dtype_models if d.dtype == "text"]) == 155


 def test_build_result(linkml_schema_bare):

@@ -135,8 +135,9 @@ def test_roll_down_inheritance():
     child = child_ns_adapter.get("Child")
     # overrides simple attrs
     assert child.doc == "child"
-    # gets unassigned parent attrs
-    assert "b" in [attr.name for attr in child.attributes]
+    # we don't receive attrs that aren't overridden in the child,
+    # instead we let python/linkml inheritance handle that for us
+    assert "b" not in [attr.name for attr in child.attributes]
     # overrides values while preserving remaining values when set
     attr_a = [attr for attr in child.attributes if attr.name == "a"][0]
     assert attr_a.value == "z"
@@ -146,7 +147,8 @@ def test_roll_down_inheritance():
     # preserve unset values in child datasets
     assert child.datasets[0].dtype == parent_cls.datasets[0].dtype
     assert child.datasets[0].dims == parent_cls.datasets[0].dims
-    # gets undeclared attrs in child datasets
+    # we *do* get undeclared attrs in child datasets,
+    # since those are not handled by python/linkml inheritance
     assert "d" in [attr.name for attr in child.datasets[0].attributes]
     # overrides set values in child datasets while preserving unset
     c_attr = [attr for attr in child.datasets[0].attributes if attr.name == "c"][0]

@@ -114,14 +114,14 @@ def _icephys_stimulus_and_response(
     n_samples = generator.integers(20, 50)
     stimulus = VoltageClampStimulusSeries(
         name=f"vcss_{i}",
-        data=VoltageClampStimulusSeriesData(value=[i] * n_samples),
+        data=VoltageClampStimulusSeriesData(value=np.array([i] * n_samples, dtype=float)),
         stimulus_description=f"{i}",
         sweep_number=i,
         electrode=electrode,
     )
     response = VoltageClampSeries(
         name=f"vcs_{i}",
-        data=VoltageClampSeriesData(value=[i] * n_samples),
+        data=VoltageClampSeriesData(value=np.array([i] * n_samples, dtype=float)),
         stimulus_description=f"{i}",
         electrode=electrode,
     )