Mirror of https://github.com/p2p-ld/nwb-linkml.git, synced 2024-11-10 00:34:29 +00:00
Correctly handle container classes: use dicts instead of lists so children can be indexed by name. Overriding __getitem__ and __getattr__ comes later.
This commit is contained in:
parent d16b4757e1
commit f672b931e5

7 changed files with 51 additions and 21 deletions
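The commit message sketches the direction rather than the finished code: container contents move from lists to dicts keyed by each child's name, with __getitem__ and __getattr__ overrides planned so children can be reached by key or by attribute. A minimal sketch of that pattern, purely illustrative and not code from this commit (the Container class and its fields are hypothetical):

class Container:
    """Toy stand-in for a generated container class."""

    def __init__(self, **children):
        # keep children in a dict keyed by name so they can be looked up directly
        self._children = dict(children)

    def __getitem__(self, name):
        # container['position'] -> the child named 'position'
        return self._children[name]

    def __getattr__(self, name):
        # only called when normal attribute lookup fails;
        # fall back to the children dict so container.position also works
        children = self.__dict__.get("_children", {})
        if name in children:
            return children[name]
        raise AttributeError(name)


module = Container(position=Container(), licks=Container())
assert module["position"] is module.position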
@@ -192,13 +192,15 @@ class ClassAdapter(Adapter):
                 required=True,
                 ifabsent=f'string({self.cls.name})',
                 equals_string=self.cls.name,
-                range='string'
+                range='string',
+                identifier=True
             )
         else:
             name_slot = SlotDefinition(
                 name='name',
                 required=True,
-                range='string'
+                range='string',
+                identifier=True
             )
         return name_slot
 
@@ -18,8 +18,8 @@ class GroupAdapter(ClassAdapter):
     def build(self) -> BuildResult:
         # Handle container groups with only * quantity unnamed groups
         if len(self.cls.groups) > 0 and \
-            all([self._check_if_container(g) for g in self.cls.groups]) and \
-            self.parent is not None:
+            all([self._check_if_container(g) for g in self.cls.groups]): # and \
+            # self.parent is not None:
             return self.handle_container_group(self.cls)
 
         # handle if we are a terminal container group without making a new class
@@ -58,22 +58,38 @@ class GroupAdapter(ClassAdapter):
 
         """
 
 
         # don't build subgroups as their own classes, just make a slot
         # that can contain them
-        if not self.cls.name:
-            name = 'children'
-        else:
+        if self.cls.name:
             name = cls.name
+        # elif len(cls.groups) == 1:
+        #     name = camel_to_snake(cls.groups[0].neurodata_type_inc)
+        else:
+            name = 'children'
 
-        res = BuildResult(
-            slots = [SlotDefinition(
+        slot = SlotDefinition(
             name=name,
             multivalued=True,
             description=cls.doc,
-            any_of=[{'range': subcls.neurodata_type_inc} for subcls in cls.groups]
-            )]
+            any_of=[{'range': subcls.neurodata_type_inc} for subcls in cls.groups],
+            inlined=True,
+            inlined_as_list=False
         )
-        return res
 
+        if self.parent is not None:
+            # if we have a parent,
+            # just return the slot itself without the class
+            slot.description = cls.doc
+            return BuildResult(
+                slots=[slot]
+            )
+        else:
+            # We are a top-level container class like ProcessingModule
+            base = self.build_base()
+            # remove all the attributes and replace with child slot
+            base.classes[0].attributes = [slot]
+            return base
 
 
     def handle_container_slot(self, cls:Group) -> BuildResult:
         """
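The combination in this hunk is what enables name-based indexing: the `name` slot is now an identifier (first hunk above), and the `children` slot is multivalued, inlined, and not inlined_as_list, so LinkML can represent a container's children as a mapping keyed by name rather than a list. A rough illustration of the intended shape of a generated container model; the classes below are hypothetical stand-ins, not actual generator output:

from typing import Dict, Optional
from pydantic import BaseModel


class TimeSeries(BaseModel):
    """Stand-in for an inlined neurodata type whose `name` slot is the identifier."""
    name: str
    description: Optional[str] = None


class ProcessingModule(BaseModel):
    """Stand-in for a top-level container class."""
    name: str
    children: Dict[str, TimeSeries] = {}


module = ProcessingModule(
    name="behavior",
    children={"position": TimeSeries(name="position")},
)
print(module.children["position"].description)  # index children by name, not by list position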
@@ -67,7 +67,7 @@ def default_template(pydantic_ver: str = "1", extra_classes:Optional[List[Type[B
 from __future__ import annotations
 from datetime import datetime, date
 from enum import Enum
-from typing import List, Dict, Optional, Any, Union
+from typing import List, Dict, Optional, Any, Union, ClassVar
 from pydantic import BaseModel as BaseModel, Field
 from nptyping import Shape, Float, Float32, Double, Float64, LongLong, Int64, Int, Int32, Int16, Short, Int8, UInt, UInt32, UInt16, UInt8, UInt64, Number, String, Unicode, Unicode, Unicode, String, Bool, Datetime64
 from nwb_linkml.types import NDArray
 
@@ -57,6 +57,8 @@ AnyType = ClassDefinition(
     description="""Needed because some classes in hdmf-common are datasets without dtype"""
 )
 
+
+
 NwbLangSchema = SchemaDefinition(
     name="nwb.language",
     id='nwb.language',
 
@@ -6,7 +6,7 @@ so we will make our own mapping class here and re-evaluate whether they should b
 """
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Literal, List, Dict, Optional
+from typing import Literal, List, Dict, Optional, Type
 
 import h5py
 from enum import StrEnum
@@ -14,7 +14,7 @@ from enum import StrEnum
 from pydantic import BaseModel, Field, ConfigDict
 
 from nwb_linkml.providers.schema import SchemaProvider
-from nwb_linkml.maps.hdmf import dynamictable_to_df
+from nwb_linkml.maps.hdmf import dynamictable_to_model
 from nwb_linkml.types.hdf5 import HDF5_Path
 
 
@@ -71,6 +71,10 @@ class H5ReadResult(BaseModel):
     If completed, built result. A dict that can be instantiated into the model.
     If completed is True and result is None, then remove this object
     """
+    model: Optional[Type[BaseModel]] = None
+    """
+    The model that this item should be cast into
+    """
     completes: List[str] = Field(default_factory=list)
     """
     If this result completes any other fields, we remove them from the build queue
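The new `model` field stores a class rather than an instance, so a completed result can carry both the built dict and the pydantic model it should eventually be cast into. A simplified sketch of how such a field behaves; ReadResult and Target below are hypothetical stand-ins, not the real H5ReadResult:

from typing import Optional, Type
from pydantic import BaseModel


class Target(BaseModel):
    name: str


class ReadResult(BaseModel):
    """Simplified stand-in for H5ReadResult."""
    result: Optional[dict] = None
    model: Optional[Type[BaseModel]] = None


res = ReadResult(result={"name": "units"}, model=Target)
instance = res.model(**res.result)  # cast the built dict into its model
print(type(instance).__name__)      # Target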
@@ -181,7 +185,7 @@ class ResolveDynamicTable(HDF5Map):
         else:
             base_model = None
 
-        model = dynamictable_to_df(obj, base=base_model)
+        model = dynamictable_to_model(obj, base=base_model)
 
         completes = ['/'.join([src.path, child]) for child in obj.keys()]
 
@@ -227,9 +231,12 @@ class ResolveModelGroup(HDF5Map):
             source=src,
             completed=True,
+            result = res,
+            model = model,
             namespace=src.namespace,
             neurodata_type=src.neurodata_type
         )
 
 
+#
 # class ResolveModelDataset(HDF5Map):
 #     phase = ReadPhases.read
 
@@ -59,6 +59,9 @@ def model_from_dynamictable(group:h5py.Group, base:Optional[BaseModel] = None) -
 
         #nptype = nptyping.typing_.name_per_dtype[group[col].dtype.type]
         nptype = group[col].dtype.type
+        if nptype == np.void:
+            warnings.warn(f"Cant handle numpy void type for column {col} in {group.name}")
+            continue
         type_ = Optional[NDArray[Any, nptype]]
 
 
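The np.void guard targets compound (structured) HDF5 dtypes, which h5py reports with dtype.type equal to np.void and which can't be expressed as an NDArray element type, so the column is skipped with a warning. A small check of that behaviour (the file name below is hypothetical):

import h5py
import numpy as np

compound = np.dtype([("x", "f4"), ("y", "i4")])
with h5py.File("example.h5", "w") as f:
    dset = f.create_dataset("compound_col", shape=(3,), dtype=compound)
    print(dset.dtype.type is np.void)  # True: structured dtypes resolve to np.void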
@@ -10,7 +10,7 @@ from ..fixtures import tmp_output_dir, set_config_vars
 from nwb_linkml.io.hdf5 import HDF5IO
 from nwb_linkml.io.hdf5 import truncate_file
 
-@pytest.mark.skip()
+# @pytest.mark.skip()
 def test_hdf_read():
     NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
     if not NWBFILE.exists():
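Un-commenting the test relies on the exists() check to guard the hard-coded local path. The same guard could also be expressed declaratively with pytest; a sketch under that assumption, not the repository's code:

import pytest
from pathlib import Path

NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')


@pytest.mark.skipif(not NWBFILE.exists(), reason="example NWB file not available locally")
def test_hdf_read():
    ...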