- Get imports for parent class slots

- Handle scalar-valued datasets with subattributes
- Start on HDF5 I/O
sneakers-the-rat 2023-09-04 16:16:29 -07:00
parent bb9dda6e66
commit 8f4f99cffd
6 changed files with 2052 additions and 10 deletions

hdf5_linkml/poetry.lock: generated file, 1952 additions (diff suppressed because it is too large)


@@ -0,0 +1,20 @@
[tool.poetry]
name = "hdf5-linkml"
version = "0.1.0"
description = "Adapt and load hdf5 files to linkml-pydantic models"
authors = ["sneakers-the-rat <JLSaunders987@gmail.com>"]
license = "AGPL-3.0"
readme = "README.md"
packages = [{include = "hdf5_linkml", from="src"}]

[tool.poetry.dependencies]
python = "^3.11"
h5py = "^3.9.0"
nwb_linkml = { path = '..', develop = true, optional = true }
linkml = "^1.5.7"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
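
Since nwb_linkml is declared optional here, code that uses it would typically guard the import at module load time. A minimal sketch follows; the HAVE_NWB_LINKML flag name is an assumption for illustration, not something defined in this commit:

# hypothetical guard for the optional nwb_linkml dependency declared above
try:
    import nwb_linkml
    HAVE_NWB_LINKML = True
except ImportError:
    nwb_linkml = None
    HAVE_NWB_LINKML = False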


@@ -0,0 +1,31 @@
"""
Base I/O class for loading and saving hdf5 files
"""
from typing import List

from linkml_runtime.linkml_model import SchemaDefinition


class H5File:

    # --------------------------------------------------
    # Hooks
    # --------------------------------------------------

    def load_embedded_schema(self, h5f) -> List[dict]:
        """
        Load any schema embedded within the hdf5 file

        Returns:
            List[dict]: the embedded schema, one dict per schema file
        """
        pass

    def translate_schema(self, schema: dict) -> SchemaDefinition:
        """
        Optionally translate schema from the source language into LinkML

        Args:
            schema (dict): one schema as returned by load_embedded_schema

        Returns:
            SchemaDefinition
        """


@@ -51,6 +51,30 @@ class DatasetAdapter(ClassAdapter):
             )
             res = BuildResult(slots = [this_slot])

+        # if the scalar-valued class has attributes, append a
+        # 'value' slot that holds the (scalar) value of the dataset
+        elif self.cls.neurodata_type_inc != 'VectorData' and \
+                not self.cls.neurodata_type_inc and \
+                self.cls.attributes and \
+                not self.cls.dims and \
+                not self.cls.shape and \
+                self.cls.name:
+
+            self._handlers.append('scalar_class')
+
+            # quantity (including requirement) is handled by the
+            # parent slot - the value is required if the value class is
+            # supplied.
+            # ie.
+            # Optional[ScalarClass] = None
+            # class ScalarClass:
+            #     value: dtype
+            value_slot = SlotDefinition(
+                name='value',
+                range=self.handle_dtype(self.cls.dtype),
+                required=True
+            )
+            res.classes[0].attributes['value'] = value_slot

         return res
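
The comment above describes the shape of the generated models. Roughly, the pattern looks like the following sketch, where ScalarClass, Parent, unit, and the float dtype are invented placeholders rather than real NWB types:

from typing import Optional

from pydantic import BaseModel


class ScalarClass(BaseModel):
    # the dataset's scalar value; required whenever the class itself is present
    value: float
    # subattributes of the dataset become ordinary fields alongside 'value'
    unit: Optional[str] = None


class Parent(BaseModel):
    # quantity is expressed on the parent slot: the whole object is optional,
    # but if it is supplied, its 'value' must be set
    scalar_class: Optional[ScalarClass] = None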


@@ -25,7 +25,7 @@ from nwb_linkml.maps.dtype import flat_to_npytyping
 from linkml.generators import PydanticGenerator
 from linkml_runtime.linkml_model.meta import (
     Annotation,
-    ClassDefinition,
+    ClassDefinition, ClassDefinitionName,
     SchemaDefinition,
     SlotDefinition,
     SlotDefinitionName,
@@ -219,6 +219,26 @@ class NWBPydanticGenerator(PydanticGenerator):
         return imports

+    def _get_class_imports(
+            self,
+            cls:ClassDefinition,
+            sv:SchemaView,
+            all_classes:dict[ClassDefinitionName, ClassDefinition]) -> List[str]:
+        """Get the imports needed for a single class"""
+        needed_classes = []
+        needed_classes.append(cls.is_a)
+
+        # get needed classes used as ranges in class attributes
+        for slot_name in sv.class_slots(cls.name):
+            slot = deepcopy(sv.induced_slot(slot_name, cls.name))
+            if slot.range in all_classes:
+                needed_classes.append(slot.range)
+            # handle when a range is a union of classes
+            if slot.any_of:
+                for any_slot_range in slot.any_of:
+                    if any_slot_range.range in all_classes:
+                        needed_classes.append(any_slot_range.range)
+
+        return needed_classes
+
     def _get_imports(self, sv:SchemaView) -> Dict[str, List[str]]:
         all_classes = sv.all_classes(imports=True)
@@ -227,15 +247,10 @@ class NWBPydanticGenerator(PydanticGenerator):
         # find needed classes - is_a and slot ranges
         for clsname, cls in local_classes.items():
-            needed_classes.append(cls.is_a)
-            for slot_name, slot in cls.attributes.items():
-                if slot.range in all_classes:
-                    needed_classes.append(slot.range)
-                if slot.any_of:
-                    for any_slot_range in slot.any_of:
-                        if any_slot_range.range in all_classes:
-                            needed_classes.append(any_slot_range.range)
+            # get imports for this class
+            needed_classes.extend(self._get_class_imports(cls, sv, all_classes))

         # remove duplicates and arraylikes
         needed_classes = [cls for cls in set(needed_classes) if cls is not None and cls != 'Arraylike']
         needed_classes = [cls for cls in needed_classes if sv.get_class(cls).is_a != 'Arraylike']
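
To make the refactor concrete, here is a hedged sketch of what the per-class collection gathers for a toy schema; the schema, its class names (NWBDataInterface, TimeSeries, ElectrodeGroup, DynamicTable), and the slot names are invented for illustration and are not taken from nwb_linkml:

from linkml_runtime.utils.schemaview import SchemaView

# toy schema: TimeSeries is_a NWBDataInterface, one slot has a class-valued range,
# another has a union (any_of) of a class and a scalar type
schema_yaml = """
id: https://example.org/toy
name: toy
prefixes:
  linkml: https://w3id.org/linkml/
imports:
  - linkml:types
default_range: string
classes:
  NWBDataInterface: {}
  ElectrodeGroup: {}
  DynamicTable: {}
  TimeSeries:
    is_a: NWBDataInterface
    attributes:
      electrodes:
        range: ElectrodeGroup
      table:
        any_of:
          - range: DynamicTable
          - range: string
"""

sv = SchemaView(schema_yaml)
all_classes = sv.all_classes(imports=True)
cls = sv.get_class("TimeSeries")

# mirrors the logic of _get_class_imports: the is_a parent plus any class-valued
# slot ranges, including ranges nested inside any_of unions
needed = [cls.is_a]
for slot_name in sv.class_slots(cls.name):
    slot = sv.induced_slot(slot_name, cls.name)
    if slot.range in all_classes:
        needed.append(slot.range)
    if slot.any_of:
        needed.extend(a.range for a in slot.any_of if a.range in all_classes)

print(needed)  # expected: ['NWBDataInterface', 'ElectrodeGroup', 'DynamicTable']

Pulling this walk into _get_class_imports means the same range-collection logic can be reused for any single class, instead of living only inside the loop in _get_imports.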
@@ -251,7 +266,7 @@ class NWBPydanticGenerator(PydanticGenerator):
             imported_classes.extend(classes)

         module_classes = [c for c in list(module_classes) if c.is_a != 'Arraylike']
-        imported_classes = [c for c in imported_classes if sv.get_class(c).is_a != 'Arraylike']
+        imported_classes = [c for c in imported_classes if sv.get_class(c) and sv.get_class(c).is_a != 'Arraylike']

         sorted_classes = self.sort_classes(module_classes, imported_classes)
         self.sorted_class_names = [camelcase(cname) for cname in imported_classes]