diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py
index 9419f01..64abf14 100644
--- a/nwb_linkml/src/nwb_linkml/maps/dtype.py
+++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py
@@ -1,5 +1,6 @@
 import numpy as np
 from typing import Any
+from datetime import datetime
 
 flat_to_linkml = {
     "float" : "float",
@@ -67,6 +68,7 @@ np_to_python = {
     np.integer: int,
     np.byte: bytes,
     np.bytes_: bytes,
+    np.datetime64: datetime,
     **{n:int for n in (np.int8, np.int16, np.int32, np.int64, np.short, np.uint8, np.uint16, np.uint32, np.uint64, np.uint)},
     **{n:float for n in (np.float16, np.float32, np.floating, np.float32, np.float64, np.single, np.double, np.float_)},
     **{n:str for n in (np.character, np.str_, np.string_, np.unicode_)}
@@ -83,7 +85,8 @@ allowed_precisions = {
     'float16': ['float16', 'float32', 'float64'],
     'float32': ['float32', 'float64'],
     'utf': ['ascii'],
-    'number': ['short', 'int', 'long', 'int16', 'int32', 'int64', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'float', 'float16', 'float32', 'float64']
+    'number': ['short', 'int', 'long', 'int16', 'int32', 'int64', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'float', 'float16', 'float32', 'float64'],
+    'datetime64': ['object']
 }
 """
 Following HDMF, it turns out that specifying precision actually specifies minimum precision
diff --git a/nwb_linkml/src/nwb_linkml/maps/hdf5.py b/nwb_linkml/src/nwb_linkml/maps/hdf5.py
index 7c32a42..9392a95 100644
--- a/nwb_linkml/src/nwb_linkml/maps/hdf5.py
+++ b/nwb_linkml/src/nwb_linkml/maps/hdf5.py
@@ -9,6 +9,7 @@ import pdb
 from abc import abstractmethod
 from pathlib import Path
 from typing import Literal, List, Dict, Optional, Type, Union, Tuple
+import inspect
 
 import h5py
 from enum import StrEnum
@@ -20,6 +21,7 @@ from nwb_linkml.maps import Map
 from nwb_linkml.maps.hdmf import dynamictable_to_model
 from nwb_linkml.types.hdf5 import HDF5_Path
 from nwb_linkml.types.ndarray import NDArrayProxy
+from nwb_linkml.annotations import unwrap_optional
 
 
 class ReadPhases(StrEnum):
@@ -530,6 +532,16 @@ class CompleteModelGroups(HDF5Map):
             unpacked_results, errors, completes = resolve_references(src.result, completed)
             res.update(unpacked_results)
 
+        # now that we have the model in hand, we can solve any datasets that had an array
+        # but whose attributes are fixed (and thus should just be an array, rather than a subclass)
+        for k, v in src.model.model_fields.items():
+            annotation = unwrap_optional(v.annotation)
+            if inspect.isclass(annotation) and not issubclass(annotation, BaseModel):
+                if isinstance(res, dict) and k in res and isinstance(res[k], dict) and 'array' in res[k]:
+                    res[k] = res[k]['array']
+
+
+
         instance = src.model(**res)
         return H5ReadResult(
             path=src.path,
diff --git a/nwb_linkml/tests/test_io/test_io_hdf5.py b/nwb_linkml/tests/test_io/test_io_hdf5.py
index 47ac3b7..9d239d2 100644
--- a/nwb_linkml/tests/test_io/test_io_hdf5.py
+++ b/nwb_linkml/tests/test_io/test_io_hdf5.py
@@ -10,7 +10,6 @@ from ..fixtures import tmp_output_dir, set_config_vars, data_dir
 from nwb_linkml.io.hdf5 import HDF5IO
 from nwb_linkml.io.hdf5 import truncate_file
 
-@pytest.mark.skip()
 @pytest.mark.parametrize('dset', ['aibs.nwb'])
 def test_hdf_read(data_dir, dset):
     NWBFILE = data_dir / dset
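
Note (not part of the patch): a minimal sketch of what the dtype-map additions express, assuming the module-level dicts keep the names shown in the hunks above. The hdf5.py hunk complements this on the read side by collapsing a result of the form {'array': ...} down to the bare array whenever the target field's annotation is a plain, non-BaseModel type; the test change only re-enables the previously skipped aibs.nwb read test.

# illustrative sketch only; np_to_python and allowed_precisions are the
# module-level dicts patched in nwb_linkml/src/nwb_linkml/maps/dtype.py
from datetime import datetime
import numpy as np
from nwb_linkml.maps.dtype import np_to_python, allowed_precisions

# datetime64 now resolves to Python's datetime rather than falling through
assert np_to_python[np.datetime64] is datetime

# and a spec declaring datetime64 accepts the 'object' storage dtype h5py uses
assert 'object' in allowed_precisions['datetime64']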