messy ass checkpoint before i come back and clean it up

This commit is contained in:
sneakers-the-rat 2024-10-03 18:44:43 -07:00
parent 1f7955d6ef
commit 1afe359681
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
3 changed files with 14 additions and 9 deletions

View file

@@ -39,7 +39,6 @@ as ``S32`` isoformatted byte strings (timezones optional) like:
 """
-import pdb
 import sys
 from datetime import datetime
 from pathlib import Path
@@ -171,7 +170,7 @@ class H5Proxy:
     __pydantic_serializer__ = SchemaSerializer(
         core_schema.plain_serializer_function_ser_schema(
             to_json, when_used="json", info_arg=True
-        )
+        ),
     )

     def __init__(
@@ -218,17 +217,18 @@ class H5Proxy:
             return obj[:]

     def __getattr__(self, item: str):
-        if item not in ("shape", "__pydantic_validator__"):
-            pdb.set_trace()
         if item == "__name__":
             # special case for H5Proxies that don't refer to a real file during testing
             return "H5Proxy"
         elif item.startswith("__"):
             return object.__getattribute__(self, item)
-        with h5py.File(self.file, "r") as h5f:
-            obj = h5f.get(self.path)
-            val = getattr(obj, item)
-            return val
+        try:
+            with h5py.File(self.file, "r") as h5f:
+                obj = h5f.get(self.path)
+                val = getattr(obj, item)
+                return val
+        except AttributeError:
+            return object.__getattribute__(self, item)

     def __getitem__(
         self, item: Union[int, slice, Tuple[Union[int, slice], ...]]
@@ -303,7 +303,8 @@ class H5Proxy:
         if isinstance(other, H5Proxy):
             return self._h5arraypath == other._h5arraypath
         else:
-            raise ValueError("Can only compare equality of two H5Proxies")
+            return False
+            # raise ValueError("Can only compare equality of two H5Proxies")

     def open(self, mode: str = "r") -> "h5py.Dataset":
         """

View file

@@ -13,6 +13,7 @@ Extension of nptyping NDArray for pydantic that allows for JSON-Schema serializa
 """
+import pdb
 from typing import TYPE_CHECKING, Any, Literal, Tuple, get_origin

 import numpy as np
@@ -201,6 +202,7 @@ class NDArray(NPTypingType, metaclass=NDArrayMeta):
         cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
     ) -> core_schema.JsonSchema:
         shape, dtype = cls.__args__
+        pdb.set_trace()
         json_schema = handler(schema["metadata"])
         json_schema = handler.resolve_ref_schema(json_schema)

View file

@@ -16,6 +16,8 @@ U = TypeVar("U")
 def jsonize_array(value: Any, info: SerializationInfo) -> Union[list, dict]:
     """Use an interface class to render an array as JSON"""
+    # return [1, 2, 3]
+    # pdb.set_trace()
     interface_cls = Interface.match_output(value)
     array = interface_cls.to_json(value, info)
     array = postprocess_json(array, info)