i'm just sorta making a mess here. coming back tomorrow

This commit is contained in:
sneakers-the-rat 2023-09-06 00:50:49 -07:00
parent 85beef4276
commit 6a9d612b41
17 changed files with 294 additions and 160 deletions

View file

@@ -4,3 +4,5 @@ Translating NWB schema language to linkml
(very WIP dont @ me)
Just submitting to pypi to squat the package name
[![Coverage Status](https://coveralls.io/repos/github/p2p-ld/nwb-linkml/badge.svg)](https://coveralls.io/github/p2p-ld/nwb-linkml)

View file

@@ -2336,11 +2336,10 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[extras]
dev = ["nwb_schema_language"]
plot = ["dash", "dash-cytoscape"]
tests = ["coverage", "coveralls", "pytest", "pytest-cov", "pytest-depends", "pytest-emoji", "pytest-md"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "b04ae68224e5d20ec7f929e29d318f02fcc14fc8ca486cddf567985b45fe5951"
content-hash = "5427416a9edebc2ab2c4f7f7c9779b2b9c7e4c1c2da5dcc0968ee633110973fa"

View file

@@ -1,13 +1,96 @@
"""
This is a sandbox file that should be split out into its own pydantic-hdf5 package; for now we are just experimenting here to get our bearings
"""
from typing import Optional, List, Dict
from typing import Optional, List, Dict, overload, Literal, Type, Any
from pathlib import Path
from types import ModuleType
from typing import TypeVar, TYPE_CHECKING
from abc import abstractmethod
import h5py
from pydantic import BaseModel
from dataclasses import dataclass, field
from nwb_linkml.translate import generate_from_nwbfile
#from nwb_linkml.models.core_nwb_file import NWBFile
if TYPE_CHECKING:
from nwb_linkml.models.core_nwb_file import NWBFile
@dataclass
class HDF5Element():
cls: h5py.Dataset | h5py.Group
models: Dict[str, ModuleType]
parent: Type[BaseModel]
@abstractmethod
def read(self) -> BaseModel | List[BaseModel]:
"""
Constructs the pydantic model from the given hdf5 element
"""
@abstractmethod
def write(self) -> h5py.Dataset | h5py.Group:
"""
Create the h5py object from the in-memory pydantic model
"""
@property
def name(self) -> str:
"""Just the terminal group name"""
return self.cls.name.split('/')[-1]
def get_model(self) -> Type[BaseModel | dict]:
"""
Find our model
- If we have a neurodata_type in our attrs, use that
- Otherwise, use our parent to resolve the type
"""
if 'neurodata_type' in self.cls.attrs.keys():
return get_model(self.cls.attrs, self.models)
else:
parent_model = get_model(self.cls.parent.attrs, self.models)
field = parent_model.model_fields.get(self.name)
if isinstance(field.annotation, type) and issubclass(field.annotation, BaseModel):
return field.annotation
else:
return dict
#raise NotImplementedError('Need to unpack at least listlike annotations')
@dataclass
class H5Dataset(HDF5Element):
cls: h5py.Dataset
def read(self) -> Any:
if self.cls.shape == ():
return self.cls[()]
elif len(self.cls.shape) == 1:
return self.cls[:].tolist()
else:
raise NotImplementedError('Only scalar and 1-D datasets are handled so far')
@dataclass
class H5Group(HDF5Element):
cls: h5py.Group
def read(self) -> BaseModel:
data = {}
model = self.get_model()
model_attrs = {
k:v for k, v in self.cls.attrs.items() if k in model.model_fields.keys()
}
data.update(model_attrs)
for k, v in self.cls.items():
if isinstance(v, h5py.Group):
data[k] = H5Group(cls=v, models=self.models, parent=model).read()
elif isinstance(v, h5py.Dataset):
data[k] = H5Dataset(cls=v, models=self.models, parent=model).read()
return model(**data)
class HDF5IO():
@@ -21,6 +104,38 @@ class HDF5IO():
self._modules = generate_from_nwbfile(self.path)
return self._modules
@overload
def read(self, path:None) -> 'NWBFile': ...
@overload
def read(self, path:str) -> BaseModel | Dict[str, BaseModel]: ...
def read(self, path:Optional[str] = None):
h5f = h5py.File(str(self.path))
if path:
src = h5f.get(path)
parent = get_model(src.attrs, self.modules)
else:
src = h5f
parent = getattr(self.modules['core'], 'NWBFile')
data = {}
for k, v in src.items():
if isinstance(v, h5py.Group):
data[k] = H5Group(cls=v, models=self.modules, parent=parent).read()
elif isinstance(v, h5py.Dataset):
data[k] = H5Dataset(cls=v, models=self.modules, parent=parent).read()
if path is None:
return parent(**data)
if 'neurodata_type' in src.attrs:
raise NotImplementedError('Making a submodel not supported yet')
else:
return data
def process_group(self, group:h5py.Group|h5py.File) -> dict | list:
attrs = dict(group.attrs)
@@ -53,9 +168,13 @@ class HDF5IO():
if len(data.shape) == 1:
return list(data[:])
def get_model(attrs: h5py.AttributeManager, models: Dict[str, ModuleType]) -> Type[BaseModel]:
ns = attrs.get('namespace')
model_name = attrs.get('neurodata_type')
return getattr(models[ns], model_name)
if __name__ == "__main__":
NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
h5f = HDF5IO(NWBFILE)
# if __name__ == "__main__":
# NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
# h5f = HDF5IO(NWBFILE)
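For orientation, a minimal sketch of how this read path is meant to be exercised, based on the overloads above (the file path below is hypothetical; read() with no argument builds the generated NWBFile model, while read() with a path returns a submodel or, for groups without a neurodata_type, a plain dict):

from pathlib import Path
from nwb_linkml.io.hdf5 import HDF5IO

# Hypothetical local file; any NWB file with embedded specifications should do
nwbfile_path = Path('data/sub-000_ses-000.nwb')
io = HDF5IO(path=nwbfile_path)

# Whole-file read: constructs the generated NWBFile model from the root group
nwbfile = io.read()

# Targeted read: '/general' has no neurodata_type, so it comes back as a dict
general = io.read('/general')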

View file

@@ -172,17 +172,17 @@ class ImagesOrderOfImages(ImageReferences):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
NWBData.model_rebuild()
TimeSeriesReferenceVectorData.model_rebuild()
Image.model_rebuild()
ImageReferences.model_rebuild()
NWBContainer.model_rebuild()
NWBDataInterface.model_rebuild()
TimeSeries.model_rebuild()
TimeSeriesData.model_rebuild()
TimeSeriesStartingTime.model_rebuild()
TimeSeriesSync.model_rebuild()
ProcessingModule.model_rebuild()
Images.model_rebuild()
ImagesOrderOfImages.model_rebuild()
# NWBData.model_rebuild()
# TimeSeriesReferenceVectorData.model_rebuild()
# Image.model_rebuild()
# ImageReferences.model_rebuild()
# NWBContainer.model_rebuild()
# NWBDataInterface.model_rebuild()
# TimeSeries.model_rebuild()
# TimeSeriesData.model_rebuild()
# TimeSeriesStartingTime.model_rebuild()
# TimeSeriesSync.model_rebuild()
# ProcessingModule.model_rebuild()
# Images.model_rebuild()
# ImagesOrderOfImages.model_rebuild()
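For context on the calls being commented out above: in pydantic v2, model_rebuild() re-runs schema building so that forward references written as strings can be resolved once the referenced classes actually exist. A standalone sketch with hypothetical models (not the generated NWB classes):

from pydantic import BaseModel

class TimeSeriesLike(BaseModel):
    # 'DataLike' is a forward reference: the class is defined further down
    data: 'DataLike'

class DataLike(BaseModel):
    values: list[float] = []

# Re-run schema building now that DataLike exists, resolving the string
# annotation inside TimeSeriesLike
TimeSeriesLike.model_rebuild()

ts = TimeSeriesLike(data=DataLike(values=[0.0, 1.0]))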

View file

@@ -124,13 +124,13 @@ class Position(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
SpatialSeries.model_rebuild()
SpatialSeriesData.model_rebuild()
BehavioralEpochs.model_rebuild()
BehavioralEvents.model_rebuild()
BehavioralTimeSeries.model_rebuild()
PupilTracking.model_rebuild()
EyeTracking.model_rebuild()
CompassDirection.model_rebuild()
Position.model_rebuild()
# SpatialSeries.model_rebuild()
# SpatialSeriesData.model_rebuild()
# BehavioralEpochs.model_rebuild()
# BehavioralEvents.model_rebuild()
# BehavioralTimeSeries.model_rebuild()
# PupilTracking.model_rebuild()
# EyeTracking.model_rebuild()
# CompassDirection.model_rebuild()
# Position.model_rebuild()

View file

@@ -40,5 +40,5 @@ class Device(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
Device.model_rebuild()
# Device.model_rebuild()

View file

@@ -231,21 +231,21 @@ class Clustering(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ElectricalSeries.model_rebuild()
ElectricalSeriesData.model_rebuild()
ElectricalSeriesElectrodes.model_rebuild()
SpikeEventSeries.model_rebuild()
SpikeEventSeriesData.model_rebuild()
FeatureExtraction.model_rebuild()
FeatureExtractionFeatures.model_rebuild()
FeatureExtractionElectrodes.model_rebuild()
EventDetection.model_rebuild()
EventWaveform.model_rebuild()
FilteredEphys.model_rebuild()
LFP.model_rebuild()
ElectrodeGroup.model_rebuild()
ClusterWaveforms.model_rebuild()
ClusterWaveformsWaveformMean.model_rebuild()
ClusterWaveformsWaveformSd.model_rebuild()
Clustering.model_rebuild()
# ElectricalSeries.model_rebuild()
# ElectricalSeriesData.model_rebuild()
# ElectricalSeriesElectrodes.model_rebuild()
# SpikeEventSeries.model_rebuild()
# SpikeEventSeriesData.model_rebuild()
# FeatureExtraction.model_rebuild()
# FeatureExtractionFeatures.model_rebuild()
# FeatureExtractionElectrodes.model_rebuild()
# EventDetection.model_rebuild()
# EventWaveform.model_rebuild()
# FilteredEphys.model_rebuild()
# LFP.model_rebuild()
# ElectrodeGroup.model_rebuild()
# ClusterWaveforms.model_rebuild()
# ClusterWaveformsWaveformMean.model_rebuild()
# ClusterWaveformsWaveformSd.model_rebuild()
# Clustering.model_rebuild()

View file

@@ -98,8 +98,8 @@ class TimeIntervalsTimeseriesIndex(VectorIndex):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
TimeIntervals.model_rebuild()
TimeIntervalsTagsIndex.model_rebuild()
TimeIntervalsTimeseries.model_rebuild()
TimeIntervalsTimeseriesIndex.model_rebuild()
# TimeIntervals.model_rebuild()
# TimeIntervalsTagsIndex.model_rebuild()
# TimeIntervalsTimeseries.model_rebuild()
# TimeIntervalsTimeseriesIndex.model_rebuild()

View file

@@ -218,15 +218,15 @@ class SubjectAge(ConfiguredBaseModel):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ScratchData.model_rebuild()
NWBFile.model_rebuild()
NWBFileStimulus.model_rebuild()
NWBFileGeneral.model_rebuild()
NWBFileGeneralSourceScript.model_rebuild()
NWBFileGeneralExtracellularEphys.model_rebuild()
NWBFileGeneralIntracellularEphys.model_rebuild()
NWBFileIntervals.model_rebuild()
LabMetaData.model_rebuild()
Subject.model_rebuild()
SubjectAge.model_rebuild()
# ScratchData.model_rebuild()
# NWBFile.model_rebuild()
# NWBFileStimulus.model_rebuild()
# NWBFileGeneral.model_rebuild()
# NWBFileGeneralSourceScript.model_rebuild()
# NWBFileGeneralExtracellularEphys.model_rebuild()
# NWBFileGeneralIntracellularEphys.model_rebuild()
# NWBFileIntervals.model_rebuild()
# LabMetaData.model_rebuild()
# Subject.model_rebuild()
# SubjectAge.model_rebuild()

View file

@@ -560,42 +560,42 @@ class ExperimentalConditionsTableRepetitionsIndex(VectorIndex):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
PatchClampSeries.model_rebuild()
CurrentClampSeries.model_rebuild()
CurrentClampSeriesData.model_rebuild()
IZeroClampSeries.model_rebuild()
CurrentClampStimulusSeries.model_rebuild()
CurrentClampStimulusSeriesData.model_rebuild()
VoltageClampSeries.model_rebuild()
VoltageClampSeriesData.model_rebuild()
VoltageClampSeriesCapacitanceFast.model_rebuild()
VoltageClampSeriesCapacitanceSlow.model_rebuild()
VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
VoltageClampSeriesResistanceCompCorrection.model_rebuild()
VoltageClampSeriesResistanceCompPrediction.model_rebuild()
VoltageClampSeriesWholeCellCapacitanceComp.model_rebuild()
VoltageClampSeriesWholeCellSeriesResistanceComp.model_rebuild()
VoltageClampStimulusSeries.model_rebuild()
VoltageClampStimulusSeriesData.model_rebuild()
IntracellularElectrode.model_rebuild()
SweepTable.model_rebuild()
SweepTableSeriesIndex.model_rebuild()
IntracellularElectrodesTable.model_rebuild()
IntracellularStimuliTable.model_rebuild()
IntracellularStimuliTableStimulus.model_rebuild()
IntracellularResponsesTable.model_rebuild()
IntracellularResponsesTableResponse.model_rebuild()
IntracellularRecordingsTable.model_rebuild()
SimultaneousRecordingsTable.model_rebuild()
SimultaneousRecordingsTableRecordings.model_rebuild()
SimultaneousRecordingsTableRecordingsIndex.model_rebuild()
SequentialRecordingsTable.model_rebuild()
SequentialRecordingsTableSimultaneousRecordings.model_rebuild()
SequentialRecordingsTableSimultaneousRecordingsIndex.model_rebuild()
RepetitionsTable.model_rebuild()
RepetitionsTableSequentialRecordings.model_rebuild()
RepetitionsTableSequentialRecordingsIndex.model_rebuild()
ExperimentalConditionsTable.model_rebuild()
ExperimentalConditionsTableRepetitions.model_rebuild()
ExperimentalConditionsTableRepetitionsIndex.model_rebuild()
# PatchClampSeries.model_rebuild()
# CurrentClampSeries.model_rebuild()
# CurrentClampSeriesData.model_rebuild()
# IZeroClampSeries.model_rebuild()
# CurrentClampStimulusSeries.model_rebuild()
# CurrentClampStimulusSeriesData.model_rebuild()
# VoltageClampSeries.model_rebuild()
# VoltageClampSeriesData.model_rebuild()
# VoltageClampSeriesCapacitanceFast.model_rebuild()
# VoltageClampSeriesCapacitanceSlow.model_rebuild()
# VoltageClampSeriesResistanceCompBandwidth.model_rebuild()
# VoltageClampSeriesResistanceCompCorrection.model_rebuild()
# VoltageClampSeriesResistanceCompPrediction.model_rebuild()
# VoltageClampSeriesWholeCellCapacitanceComp.model_rebuild()
# VoltageClampSeriesWholeCellSeriesResistanceComp.model_rebuild()
# VoltageClampStimulusSeries.model_rebuild()
# VoltageClampStimulusSeriesData.model_rebuild()
# IntracellularElectrode.model_rebuild()
# SweepTable.model_rebuild()
# SweepTableSeriesIndex.model_rebuild()
# IntracellularElectrodesTable.model_rebuild()
# IntracellularStimuliTable.model_rebuild()
# IntracellularStimuliTableStimulus.model_rebuild()
# IntracellularResponsesTable.model_rebuild()
# IntracellularResponsesTableResponse.model_rebuild()
# IntracellularRecordingsTable.model_rebuild()
# SimultaneousRecordingsTable.model_rebuild()
# SimultaneousRecordingsTableRecordings.model_rebuild()
# SimultaneousRecordingsTableRecordingsIndex.model_rebuild()
# SequentialRecordingsTable.model_rebuild()
# SequentialRecordingsTableSimultaneousRecordings.model_rebuild()
# SequentialRecordingsTableSimultaneousRecordingsIndex.model_rebuild()
# RepetitionsTable.model_rebuild()
# RepetitionsTableSequentialRecordings.model_rebuild()
# RepetitionsTableSequentialRecordingsIndex.model_rebuild()
# ExperimentalConditionsTable.model_rebuild()
# ExperimentalConditionsTableRepetitions.model_rebuild()
# ExperimentalConditionsTableRepetitionsIndex.model_rebuild()
#

View file

@@ -169,14 +169,14 @@ class IndexSeries(TimeSeries):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
GrayscaleImage.model_rebuild()
RGBImage.model_rebuild()
RGBAImage.model_rebuild()
ImageSeries.model_rebuild()
ImageSeriesData.model_rebuild()
ImageMaskSeries.model_rebuild()
OpticalSeries.model_rebuild()
OpticalSeriesFieldOfView.model_rebuild()
OpticalSeriesData.model_rebuild()
IndexSeries.model_rebuild()
# GrayscaleImage.model_rebuild()
# RGBImage.model_rebuild()
# RGBAImage.model_rebuild()
# ImageSeries.model_rebuild()
# ImageSeriesData.model_rebuild()
# ImageMaskSeries.model_rebuild()
# OpticalSeries.model_rebuild()
# OpticalSeriesFieldOfView.model_rebuild()
# OpticalSeriesData.model_rebuild()
# IndexSeries.model_rebuild()

View file

@@ -335,23 +335,23 @@ class UnitsWaveformsIndexIndex(VectorIndex):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
AbstractFeatureSeries.model_rebuild()
AbstractFeatureSeriesData.model_rebuild()
AnnotationSeries.model_rebuild()
IntervalSeries.model_rebuild()
DecompositionSeries.model_rebuild()
DecompositionSeriesData.model_rebuild()
DecompositionSeriesSourceChannels.model_rebuild()
Units.model_rebuild()
UnitsSpikeTimesIndex.model_rebuild()
UnitsSpikeTimes.model_rebuild()
UnitsObsIntervalsIndex.model_rebuild()
UnitsObsIntervals.model_rebuild()
UnitsElectrodesIndex.model_rebuild()
UnitsElectrodes.model_rebuild()
UnitsWaveformMean.model_rebuild()
UnitsWaveformSd.model_rebuild()
UnitsWaveforms.model_rebuild()
UnitsWaveformsIndex.model_rebuild()
UnitsWaveformsIndexIndex.model_rebuild()
# AbstractFeatureSeries.model_rebuild()
# AbstractFeatureSeriesData.model_rebuild()
# AnnotationSeries.model_rebuild()
# IntervalSeries.model_rebuild()
# DecompositionSeries.model_rebuild()
# DecompositionSeriesData.model_rebuild()
# DecompositionSeriesSourceChannels.model_rebuild()
# Units.model_rebuild()
# UnitsSpikeTimesIndex.model_rebuild()
# UnitsSpikeTimes.model_rebuild()
# UnitsObsIntervalsIndex.model_rebuild()
# UnitsObsIntervals.model_rebuild()
# UnitsElectrodesIndex.model_rebuild()
# UnitsElectrodes.model_rebuild()
# UnitsWaveformMean.model_rebuild()
# UnitsWaveformSd.model_rebuild()
# UnitsWaveforms.model_rebuild()
# UnitsWaveformsIndex.model_rebuild()
# UnitsWaveformsIndexIndex.model_rebuild()

View file

@@ -59,6 +59,6 @@ class OptogeneticStimulusSite(NWBContainer):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OptogeneticSeries.model_rebuild()
OptogeneticStimulusSite.model_rebuild()
# OptogeneticSeries.model_rebuild()
# OptogeneticStimulusSite.model_rebuild()

View file

@@ -310,24 +310,24 @@ class CorrectedImageStack(NWBDataInterface):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
OnePhotonSeries.model_rebuild()
TwoPhotonSeries.model_rebuild()
TwoPhotonSeriesFieldOfView.model_rebuild()
RoiResponseSeries.model_rebuild()
RoiResponseSeriesData.model_rebuild()
RoiResponseSeriesRois.model_rebuild()
DfOverF.model_rebuild()
Fluorescence.model_rebuild()
ImageSegmentation.model_rebuild()
PlaneSegmentation.model_rebuild()
PlaneSegmentationImageMask.model_rebuild()
PlaneSegmentationPixelMaskIndex.model_rebuild()
PlaneSegmentationVoxelMaskIndex.model_rebuild()
ImagingPlane.model_rebuild()
ImagingPlaneManifold.model_rebuild()
ImagingPlaneOriginCoords.model_rebuild()
ImagingPlaneGridSpacing.model_rebuild()
OpticalChannel.model_rebuild()
MotionCorrection.model_rebuild()
CorrectedImageStack.model_rebuild()
# OnePhotonSeries.model_rebuild()
# TwoPhotonSeries.model_rebuild()
# TwoPhotonSeriesFieldOfView.model_rebuild()
# RoiResponseSeries.model_rebuild()
# RoiResponseSeriesData.model_rebuild()
# RoiResponseSeriesRois.model_rebuild()
# DfOverF.model_rebuild()
# Fluorescence.model_rebuild()
# ImageSegmentation.model_rebuild()
# PlaneSegmentation.model_rebuild()
# PlaneSegmentationImageMask.model_rebuild()
# PlaneSegmentationPixelMaskIndex.model_rebuild()
# PlaneSegmentationVoxelMaskIndex.model_rebuild()
# ImagingPlane.model_rebuild()
# ImagingPlaneManifold.model_rebuild()
# ImagingPlaneOriginCoords.model_rebuild()
# ImagingPlaneGridSpacing.model_rebuild()
# OpticalChannel.model_rebuild()
# MotionCorrection.model_rebuild()
# CorrectedImageStack.model_rebuild()
#

View file

@@ -125,12 +125,12 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel):
# Model rebuild
# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
ImagingRetinotopy.model_rebuild()
ImagingRetinotopyAxis1PhaseMap.model_rebuild()
ImagingRetinotopyAxis1PowerMap.model_rebuild()
ImagingRetinotopyAxis2PhaseMap.model_rebuild()
ImagingRetinotopyAxis2PowerMap.model_rebuild()
ImagingRetinotopyFocalDepthImage.model_rebuild()
ImagingRetinotopySignMap.model_rebuild()
ImagingRetinotopyVasculatureImage.model_rebuild()
# ImagingRetinotopy.model_rebuild()
# ImagingRetinotopyAxis1PhaseMap.model_rebuild()
# ImagingRetinotopyAxis1PowerMap.model_rebuild()
# ImagingRetinotopyAxis2PhaseMap.model_rebuild()
# ImagingRetinotopyAxis2PowerMap.model_rebuild()
# ImagingRetinotopyFocalDepthImage.model_rebuild()
# ImagingRetinotopySignMap.model_rebuild()
# ImagingRetinotopyVasculatureImage.model_rebuild()

View file

@@ -16,6 +16,7 @@ from nwb_linkml.io.schema import load_schema_file
from nwb_linkml.generators.pydantic import NWBPydanticGenerator
from nwb_linkml.map import apply_preload
from nwb_linkml.adapters import SchemaAdapter, NamespacesAdapter
#from nwb_linkml.models import core, hdmf_common
def make_namespace_adapter(schema: dict) -> NamespacesAdapter:
"""
@@ -118,6 +119,8 @@ def generate_from_nwbfile(path:Path) -> Dict[str, ModuleType]:
namespaces = []
h5f = h5py.File(path, 'r')
for ns_name, ns in h5f['specifications'].items():
#if ns_name in ('core', 'hdmf-common'):
# continue
ns_schema = {}
for version in ns.values():
for schema_name, schema in version.items():
@@ -133,6 +136,7 @@ def generate_from_nwbfile(path:Path) -> Dict[str, ModuleType]:
adapter.namespaces.namespaces[0].name: generate_pydantic(adapter)
for adapter in adapters
}
#pydantic_modules.update({'core': core, 'hdmf-common': hdmf_common})
return pydantic_modules
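A small sketch of how the returned dict of generated modules might be consumed, mirroring what get_model() in the hdf5 sandbox does (the file path is hypothetical; the 'core' key comes from the namespace name recorded in the adapter):

from pathlib import Path
from nwb_linkml.translate import generate_from_nwbfile

# Hypothetical path to an NWB file that embeds its /specifications group
modules = generate_from_nwbfile(Path('data/sub-000_ses-000.nwb'))

# One generated module per namespace, keyed by namespace name
NWBFile = getattr(modules['core'], 'NWBFile')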

View file

@@ -0,0 +1,10 @@
import pytest
from pathlib import Path
from nwb_linkml.io.hdf5 import HDF5IO
def test_hdf_read():
NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
io = HDF5IO(path=NWBFILE)
model = io.read('/general')
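The new test exercises the read path but stops short of asserting anything; a possible follow-up in the same file, under the assumption that /general carries no neurodata_type attribute and so the current implementation returns a plain dict:

def test_hdf_read_general_is_dict():
    # Assumes the dict-returning branch of HDF5IO.read() is hit for /general
    NWBFILE = Path('/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb')
    io = HDF5IO(path=NWBFILE)
    general = io.read('/general')
    assert isinstance(general, dict)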