Mirror of https://github.com/p2p-ld/nwb-linkml.git (synced 2025-01-09 21:54:27 +00:00)

commit bb59c9d465 (parent d31ac29294)

    remove unused code, nocover some debug arms

7 changed files with 39 additions and 70 deletions

@@ -174,7 +174,7 @@ class AttributeAdapter(Adapter):
         """
         map = self.match()
         res = map.apply(self.cls)
-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             res = self._amend_debug(res, map)
         return res

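Note on the "# pragma: no cover" comments added throughout this commit: coverage.py excludes any line that matches one of its exclusion patterns, and excluding an if-header also excludes the block nested under it, so these development-only debug arms no longer count against coverage. A minimal, self-contained sketch of the pattern (Widget and its debug flag are illustrative, not nwb-linkml code):

    # Branches that only run in development are excluded from coverage
    # measurement with a pragma comment on the branch header.
    class Widget:
        def __init__(self, debug: bool = False):
            self.debug = debug

        def build(self) -> dict:
            result = {"name": "widget"}
            if self.debug:  # pragma: no cover - only used in development
                # annotate the result so a developer can trace where it came from
                result["debug"] = True
            return result

    print(Widget().build())  # {'name': 'widget'}

The "# pragma: no cover" marker is recognized by coverage.py out of the box; projects that want their own markers can extend the exclude_lines setting in the coverage configuration.
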
@@ -203,7 +203,7 @@ class AttributeAdapter(Adapter):

     def _amend_debug(
         self, res: BuildResult, map: Optional[Type[AttributeMap]] = None
-    ) -> BuildResult:
+    ) -> BuildResult:  # pragma: no cover - only used in development
         map_name = "None" if map is None else map.__name__
         for cls in res.classes:
             cls.annotations["attribute_map"] = {"tag": "attribute_map", "value": map_name}

@@ -92,7 +92,7 @@ class ClassAdapter(Adapter):
         # Get vanilla top-level attributes
         kwargs["attributes"].extend(self.build_attrs(self.cls))

-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             kwargs["annotations"] = {}
             kwargs["annotations"]["group_adapter"] = {
                 "tag": "group_adapter",

@@ -254,6 +254,6 @@ class ClassAdapter(Adapter):
             inlined=True,
             **QUANTITY_MAP[self.cls.quantity],
         )
-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "self_slot"}
         return slot

@@ -744,7 +744,7 @@ class DatasetAdapter(ClassAdapter):
         if map is not None:
             res = map.apply(self.cls, res, self._get_full_name())

-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             res = self._amend_debug(res, map)
         return res

@@ -771,7 +771,9 @@ class DatasetAdapter(ClassAdapter):
         else:
             return matches[0]

-    def _amend_debug(self, res: BuildResult, map: Optional[Type[DatasetMap]] = None) -> BuildResult:
+    def _amend_debug(
+        self, res: BuildResult, map: Optional[Type[DatasetMap]] = None
+    ) -> BuildResult:  # pragma: no cover - only used in development
         map_name = "None" if map is None else map.__name__
         for cls in res.classes:
             cls.annotations["dataset_map"] = {"tag": "dataset_map", "value": map_name}

@@ -70,7 +70,7 @@ class GroupAdapter(ClassAdapter):

         annotations = [{"tag": "source_type", "value": "link"}]

-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             annotations.append({"tag": "group_adapter", "value": "link"})

         slots = [

@@ -117,7 +117,7 @@ class GroupAdapter(ClassAdapter):
             inlined_as_list=False,
         )

-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_group"}

         if self.parent is not None:

@@ -162,7 +162,7 @@ class GroupAdapter(ClassAdapter):
             **QUANTITY_MAP[cls.quantity],
         )

-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_slot"}

         return BuildResult(slots=[slot])

@@ -214,7 +214,7 @@ class GroupAdapter(ClassAdapter):
             inlined_as_list=True,
             **QUANTITY_MAP[self.cls.quantity],
         )
-        if self.debug:
+        if self.debug:  # pragma: no cover - only used in development
             slot.annotations["group_adapter"] = {"tag": "group_adapter", "value": "container_slot"}
         return slot

@@ -5,9 +5,7 @@ customized to support NWB models.
 See class and module docstrings for details :)
 """

-import pdb
 import re
-import sys
 from dataclasses import dataclass, field
 from pathlib import Path
 from types import ModuleType

@@ -24,7 +22,6 @@ from linkml_runtime.linkml_model.meta import (
     SlotDefinition,
     SlotDefinitionName,
 )
-from linkml_runtime.utils.compile_python import file_text
 from linkml_runtime.utils.formatutils import remove_empty_items
 from linkml_runtime.utils.schemaview import SchemaView

@@ -214,15 +211,17 @@ class AfterGenerateSlot:
             # merge injects/imports from the numpydantic array without using the merge method
             if slot.injected_classes is None:
                 slot.injected_classes = NumpydanticArray.INJECTS.copy()
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                 slot.injected_classes.extend(NumpydanticArray.INJECTS.copy())
-            if isinstance(slot.imports, list):
+            if isinstance(
+                slot.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                 slot.imports = (
                     Imports(imports=slot.imports) + NumpydanticArray.IMPORTS.model_copy()
                 )
             elif isinstance(slot.imports, Imports):
                 slot.imports += NumpydanticArray.IMPORTS.model_copy()
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                 slot.imports = NumpydanticArray.IMPORTS.model_copy()

         return slot

@@ -239,13 +238,15 @@ class AfterGenerateSlot:
         named_injects = [ModelTypeString, _get_name, NamedString]
         if slot.injected_classes is None:
             slot.injected_classes = named_injects
-        else:
+        else:  # pragma: no cover - for completeness, shouldn't happen
             slot.injected_classes.extend([ModelTypeString, _get_name, NamedString])
-        if isinstance(slot.imports, list):
+        if isinstance(
+            slot.imports, list
+        ):  # pragma: no cover - for completeness, shouldn't happen
             slot.imports = Imports(imports=slot.imports) + NamedImports
         elif isinstance(slot.imports, Imports):
             slot.imports += NamedImports
-        else:
+        else:  # pragma: no cover - for completeness, shouldn't happen
             slot.imports = NamedImports
         return slot

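The isinstance(...) calls in the two hunks above were only reformatted so the pragma comment fits on the closing line; the underlying pattern is normalizing a field that may arrive as a plain list or as an Imports container before merging new imports into it. A self-contained sketch of that normalize-then-merge idea, using a stand-in Imports dataclass rather than the real nwb_linkml class:

    # Illustrative stand-in for the Imports container; the real class lives in
    # nwb_linkml and carries more behavior.
    from dataclasses import dataclass, field
    from typing import List, Union

    @dataclass
    class Imports:
        imports: List[str] = field(default_factory=list)

        def __add__(self, other: "Imports") -> "Imports":
            return Imports(imports=self.imports + other.imports)

    def merge_imports(existing: Union[List[str], Imports, None], new: Imports) -> Imports:
        # Normalize before merging: pass an Imports through, wrap a bare list,
        # and fall back to the new imports when nothing is set yet.
        if isinstance(existing, Imports):
            return existing + new
        elif isinstance(existing, list):  # pragma: no cover - shouldn't happen
            return Imports(imports=existing) + new
        else:
            return new

    print(merge_imports(Imports(imports=["numpy"]), Imports(imports=["pydantic"])).imports)
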
@@ -268,16 +269,20 @@ class AfterGenerateClass:
         if cls.cls.name == "DynamicTable":
             cls.cls.bases = ["DynamicTableMixin", "ConfiguredBaseModel"]

-            if cls.injected_classes is None:
+            if (
+                cls.injected_classes is None
+            ):  # pragma: no cover - for completeness, shouldn't happen
                 cls.injected_classes = DYNAMIC_TABLE_INJECTS.copy()
             else:
                 cls.injected_classes.extend(DYNAMIC_TABLE_INJECTS.copy())

             if isinstance(cls.imports, Imports):
                 cls.imports += DYNAMIC_TABLE_IMPORTS
-            elif isinstance(cls.imports, list):
+            elif isinstance(
+                cls.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                 cls.imports = Imports(imports=cls.imports) + DYNAMIC_TABLE_IMPORTS
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                 cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy()
         elif cls.cls.name == "VectorData":
             cls.cls.bases = ["VectorDataMixin", "ConfiguredBaseModel"]

@@ -298,16 +303,20 @@ class AfterGenerateClass:
         elif cls.cls.name == "TimeSeriesReferenceVectorData":
             # in core.nwb.base, so need to inject and import again
             cls.cls.bases = ["TimeSeriesReferenceVectorDataMixin", "VectorData"]
-            if cls.injected_classes is None:
+            if (
+                cls.injected_classes is None
+            ):  # pragma: no cover - for completeness, shouldn't happen
                 cls.injected_classes = TSRVD_INJECTS.copy()
             else:
                 cls.injected_classes.extend(TSRVD_INJECTS.copy())

             if isinstance(cls.imports, Imports):
                 cls.imports += TSRVD_IMPORTS
-            elif isinstance(cls.imports, list):
+            elif isinstance(
+                cls.imports, list
+            ):  # pragma: no cover - for completeness, shouldn't happen
                 cls.imports = Imports(imports=cls.imports) + TSRVD_IMPORTS
-            else:
+            else:  # pragma: no cover - for completeness, shouldn't happen
                 cls.imports = TSRVD_IMPORTS.model_copy()

         return cls

@@ -362,28 +371,6 @@ class AfterGenerateClass:
         return cls


-def compile_python(
-    text_or_fn: str, package_path: Path = None, module_name: str = "test"
-) -> ModuleType:
-    """
-    Compile the text or file and return the resulting module
-    @param text_or_fn: Python text or file name that references python file
-    @param package_path: Root package path. If omitted and we've got a python file,
-                         the package is the containing
-                         directory
-    @return: Compiled module
-    """
-    python_txt = file_text(text_or_fn)
-    if package_path is None and python_txt != text_or_fn:
-        package_path = Path(text_or_fn)
-    spec = compile(python_txt, "<string>", "exec")
-    module = ModuleType(module_name)
-
-    exec(spec, module.__dict__)
-    sys.modules[module_name] = module
-    return module
-
-
 def wrap_preserving_optional(annotation: str, wrap: str) -> str:
     """
     Add a wrapping type to a type annotation string,

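The removed compile_python helper turned generated Python source text into an importable in-memory module; the test module further down now imports only NWBPydanticGenerator. For reference, a minimal sketch of the same mechanism, with a placeholder source string and module name:

    # Compile source text into a module object and register it in sys.modules,
    # mirroring what the removed helper did.  The source and module name here
    # are placeholders.
    import sys
    from types import ModuleType

    source = "GENERATED = 'hello from generated code'\n"

    code = compile(source, "<string>", "exec")  # source text -> code object
    module = ModuleType("generated_models")     # empty module to execute into
    exec(code, module.__dict__)                 # populate the module namespace
    sys.modules["generated_models"] = module    # make it importable elsewhere

    print(module.GENERATED)
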
@@ -401,7 +388,5 @@ def wrap_preserving_optional(annotation: str, wrap: str) -> str:
         annotation = is_optional.groups()[0]
         annotation = f"Optional[{wrap}[{annotation}]]"
     else:
-        if "Optional" in annotation:
-            pdb.set_trace()
         annotation = f"{wrap}[{annotation}]"
     return annotation

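For context on the hunk above: wrap_preserving_optional wraps a type-annotation string in another generic while keeping an outer Optional[...] on the outside, and the dropped pdb.set_trace() was a leftover debugging hook. A rough standalone sketch of the behavior; the regex here is an assumption, since the real pattern is defined earlier in the module:

    import re

    def wrap_preserving_optional(annotation: str, wrap: str) -> str:
        # Assumed pattern: capture the inner annotation of an Optional[...] string.
        is_optional = re.match(r"Optional\[(.*)\]$", annotation)
        if is_optional:
            inner = is_optional.groups()[0]
            return f"Optional[{wrap}[{inner}]]"
        else:
            return f"{wrap}[{annotation}]"

    print(wrap_preserving_optional("Optional[str]", "List"))  # Optional[List[str]]
    print(wrap_preserving_optional("int", "List"))            # List[int]
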
@@ -1,20 +0,0 @@
-"""
-Types used with hdf5 io
-"""
-
-from typing import Any
-
-from pydantic import GetCoreSchemaHandler
-from pydantic_core import CoreSchema, core_schema
-
-
-class HDF5_Path(str):
-    """
-    Trivial subclass of string to indicate that it is a reference to a location within an HDF5 file
-    """
-
-    @classmethod
-    def __get_pydantic_core_schema__(
-        cls, source_type: Any, handler: GetCoreSchemaHandler
-    ) -> CoreSchema:
-        return core_schema.no_info_after_validator_function(cls, handler(str))

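The deleted HDF5_Path type used pydantic v2's __get_pydantic_core_schema__ hook so that a plain str subclass can be used as a model field and come back from validation as that subclass. A small usage sketch of the pattern; ExampleModel and the path value are illustrative:

    from typing import Any

    from pydantic import BaseModel, GetCoreSchemaHandler
    from pydantic_core import CoreSchema, core_schema

    class HDF5_Path(str):
        """Reference to a location within an HDF5 file."""

        @classmethod
        def __get_pydantic_core_schema__(
            cls, source_type: Any, handler: GetCoreSchemaHandler
        ) -> CoreSchema:
            # validate as str, then re-wrap the result in the subclass
            return core_schema.no_info_after_validator_function(cls, handler(str))

    class ExampleModel(BaseModel):
        path: HDF5_Path

    model = ExampleModel(path="/acquisition/TimeSeries/data")
    assert isinstance(model.path, HDF5_Path)
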
@@ -5,6 +5,8 @@ Note that since this is largely a subclass, we don't test all of the functionality
 because it's tested in the base linkml package.
 """

+# ruff: noqa: F821 - until the tests here settle down
+
 import re
 import sys
 import typing

@@ -16,7 +18,7 @@ import pytest
 from numpydantic.ndarray import NDArrayMeta
 from pydantic import BaseModel

-from nwb_linkml.generators.pydantic import NWBPydanticGenerator, compile_python
+from nwb_linkml.generators.pydantic import NWBPydanticGenerator

 from ..fixtures import (
     TestSchemas,