Correctly generate rolled-down classes; model update to follow.

This commit is contained in:
sneakers-the-rat 2024-09-19 19:17:59 -07:00
parent cad57554fd
commit 1d27c6a259
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
6 changed files with 99 additions and 58 deletions

View file

@ -20,6 +20,7 @@ from linkml_runtime.linkml_model import (
from pydantic import BaseModel, PrivateAttr
from nwb_linkml.logging import init_logger
from nwb_linkml.maps.dtype import float_types, integer_types, string_types
from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema
if sys.version_info.minor >= 11:
@ -308,5 +309,48 @@ def has_attrs(cls: Dataset) -> bool:
return (
cls.attributes is not None
and len(cls.attributes) > 0
and all([not a.value for a in cls.attributes])
and any([not a.value for a in cls.attributes])
)
def defaults(cls: Dataset | Attribute) -> dict:
    """
    Handle default values -

    * If ``value`` is present, yield ``equals_string`` or ``equals_number`` depending on dtype
      **as well as** an ``ifabsent`` value - we both constrain the possible values to 1
      and also supply it as the default
    * else, if ``default_value`` is present, yield an appropriate ``ifabsent`` value
    * If neither, yield an empty dict

    Unlike nwb_schema_language, when ``value`` is set, we yield both an ``equals_*`` constraint
    and an ``ifabsent`` constraint, because an ``equals_*`` can be declared without a default
    in order to validate that a value is correctly set as the constrained value, and fail
    if a value isn't provided.

    Args:
        cls (Dataset | Attribute): schema object carrying ``value``, ``default_value``
            and ``dtype``

    Returns:
        dict: kwargs for a linkml ``SlotDefinition`` (``equals_number`` /
        ``equals_string`` / ``ifabsent``), possibly empty
    """
    ret = {}
    # Compare against None explicitly: falsy-but-valid fixed values such as
    # 0, 0.0, or "" are real constraints and must not be silently dropped.
    if cls.value is not None:
        if cls.dtype in integer_types:
            ret["equals_number"] = cls.value
            ret["ifabsent"] = f"integer({cls.value})"
        elif cls.dtype in float_types:
            ret["equals_number"] = cls.value
            ret["ifabsent"] = f"float({cls.value})"
        elif cls.dtype in string_types:
            ret["equals_string"] = cls.value
            ret["ifabsent"] = f"string({cls.value})"
        else:
            # Unknown/compound dtype: fall back to a string equality constraint
            # and pass the raw value through as the default.
            ret["equals_string"] = cls.value
            ret["ifabsent"] = cls.value
    elif cls.default_value is not None:
        if cls.dtype in string_types:
            ret["ifabsent"] = f"string({cls.default_value})"
        elif cls.dtype in integer_types:
            # NOTE(review): this branch spells the ifabsent function ``int(...)``
            # while the ``value`` branch above uses ``integer(...)`` — confirm which
            # spelling the downstream linkml ifabsent processor expects.
            ret["ifabsent"] = f"int({cls.default_value})"
        elif cls.dtype in float_types:
            ret["ifabsent"] = f"float({cls.default_value})"
        else:
            ret["ifabsent"] = cls.default_value
    return ret

View file

@ -7,26 +7,13 @@ from typing import ClassVar, Optional, Type, TypedDict
from linkml_runtime.linkml_model.meta import SlotDefinition
from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d
from nwb_linkml.adapters.adapter import Adapter, BuildResult, defaults, is_1d
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.maps import Map
from nwb_linkml.maps.dtype import handle_dtype, inlined
from nwb_schema_language import Attribute
def _make_ifabsent(val: str | int | float | None) -> str | None:
if val is None:
return None
elif isinstance(val, str):
return f"string({val})"
elif isinstance(val, int):
return f"integer({val})"
elif isinstance(val, float):
return f"float({val})"
else:
return str(val)
class AttrDefaults(TypedDict):
"""Default fields for an attribute"""
@ -38,31 +25,6 @@ class AttrDefaults(TypedDict):
class AttributeMap(Map):
"""Base class for attribute mapping transformations :)"""
@classmethod
def handle_defaults(cls, attr: Attribute) -> AttrDefaults:
    """
    Construct arguments for linkml slot default metaslots from nwb schema lang attribute props

    A fixed ``value`` yields an ``equals_number`` or ``equals_string`` constraint
    plus a matching ``ifabsent`` default; otherwise ``default_value`` (if set)
    yields only an ``ifabsent`` default.
    """
    equals_string = None
    equals_number = None
    default_value = None
    # Use explicit None checks throughout: 0, 0.0, and "" are legitimate fixed
    # values / defaults and must not be discarded by truthiness tests.
    if attr.value is not None:
        if isinstance(attr.value, (int, float)):
            equals_number = attr.value
        else:
            equals_string = str(attr.value)

    if equals_number is not None:
        default_value = _make_ifabsent(equals_number)
    elif equals_string is not None:
        default_value = _make_ifabsent(equals_string)
    elif attr.default_value is not None:
        default_value = _make_ifabsent(attr.default_value)

    return AttrDefaults(
        equals_string=equals_string, equals_number=equals_number, ifabsent=default_value
    )
@classmethod
@abstractmethod
def check(cls, attr: Attribute) -> bool:
@ -105,7 +67,7 @@ class MapScalar(AttributeMap):
description=attr.doc,
required=attr.required,
inlined=inlined(attr.dtype),
**cls.handle_defaults(attr),
**defaults(attr),
)
return BuildResult(slots=[slot])
@ -154,7 +116,7 @@ class MapArray(AttributeMap):
required=attr.required,
inlined=inlined(attr.dtype),
**expressions,
**cls.handle_defaults(attr),
**defaults(attr),
)
return BuildResult(slots=[slot])

View file

@ -7,7 +7,7 @@ from typing import ClassVar, Optional, Type
from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition
from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.adapter import BuildResult, defaults, has_attrs, is_1d, is_compound
from nwb_linkml.adapters.array import ArrayAdapter
from nwb_linkml.adapters.classes import ClassAdapter
from nwb_linkml.maps import QUANTITY_MAP, Map
@ -108,6 +108,7 @@ class MapScalar(DatasetMap):
description=cls.doc,
range=handle_dtype(cls.dtype),
**QUANTITY_MAP[cls.quantity],
**defaults(cls),
)
res = BuildResult(slots=[this_slot])
return res
@ -208,7 +209,19 @@ class MapScalarAttributes(DatasetMap):
"""
Map to a scalar attribute with an adjoining "value" slot
"""
value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True)
# the *value slot* within the generated class is always required,
# but the slot in the parent class referring to this one will indicate whether the whole
# thing is optional or not. You can't provide the attributes of the optional dataset
# without providing its value
quantity = QUANTITY_MAP[cls.quantity].copy()
quantity["required"] = True
value_slot = SlotDefinition(
name="value",
range=handle_dtype(cls.dtype),
**quantity,
**defaults(cls),
)
res.classes[0].attributes["value"] = value_slot
return res

View file

@ -266,11 +266,7 @@ class NamespacesAdapter(Adapter):
in_schema = False
if isinstance(cls, str) and cls in [
c.neurodata_type_def for c in schema.created_classes
]:
in_schema = True
elif isinstance(cls, Dataset) and cls in schema.datasets:
in_schema = True
elif isinstance(cls, Group) and cls in schema.groups:
] or isinstance(cls, Dataset) and cls in schema.datasets or isinstance(cls, Group) and cls in schema.groups:
in_schema = True
if in_schema:
@ -397,16 +393,16 @@ def roll_down_nwb_class(
Merge an ancestor (via ``neurodata_type_inc`` ) source class with a
child ``target`` class.
On the first recurive pass, only those values that are set on the target are copied from the
On the first recursive pass, only those values that are set on the target are copied from the
source class - this isn't a true merging, what we are after is to recursively merge all the
values that are modified in the child class with those of the parent class below the top level,
the top-level attributes will be carried through via normal inheritance.
Rather than re-instantiating the child class, we return the dictionary so that this
function can be used in series to merge a whole ancestry chain within
:class:`.NamespacesAdapter` , but this isn't exposed in the function since
class definitions can be spread out over many schemas, and we need the orchestration
of the adapter to have them in all cases we'd be using this.
:class:`.NamespacesAdapter` , but merging isn't exposed in the function since
ancestor class definitions can be spread out over many schemas,
and we need the orchestration of the adapter to have them in all cases we'd be using this.
Args:
source (dict): source dictionary
@ -420,9 +416,9 @@ def roll_down_nwb_class(
"""
if isinstance(source, (Group, Dataset)):
source = source.model_dump(exclude_unset=True, exclude_none=True)
source = source.model_dump(exclude_none=True)
if isinstance(target, (Group, Dataset)):
target = target.model_dump(exclude_unset=True, exclude_none=True)
target = target.model_dump(exclude_none=True)
exclude = ("neurodata_type_def",)

View file

@ -66,6 +66,26 @@ flat_to_np = {
"isodatetime": np.datetime64,
}
# Flat NWB dtype names that map onto integer ranges: signed and unsigned,
# all widths, plus the C-style aliases ("short", "long", bare "int"/"uint").
integer_types = {
    "int", "int8", "int16", "int32", "int64",
    "uint", "uint8", "uint16", "uint32", "uint64",
    "short", "long",
}
# Floating-point dtype names; "numeric" is grouped with floats here.
float_types = {"double", "float", "float32", "float64", "numeric"}
# Text/string dtype names, covering the utf spelling variants and ascii.
string_types = {"ascii", "text", "utf", "utf8", "utf_8"}
np_to_python = {
Any: Any,
np.number: float,

View file

@ -9,10 +9,16 @@ We will handle cardinality of array dimensions elsewhere
"""
QUANTITY_MAP = {
"*": {"required": False, "multivalued": True},
"*": {"required": None, "multivalued": True},
"+": {"required": True, "multivalued": True},
"?": {"required": False, "multivalued": False},
1: {"required": True, "multivalued": False},
"?": {"required": None, "multivalued": None},
1: {"required": True, "multivalued": None},
# include the NoneType for indexing
None: {"required": None, "multivalued": None},
}
"""
Map between NWB quantity values and linkml quantity metaslot values.
Use ``None`` for defaults (required: False, multivalued: False) rather than ``False``
to avoid adding unnecessary attributes
"""