add monkeypatch to schemaview

sneakers-the-rat 2023-09-11 22:23:42 -07:00
parent 6ff7e6baab
commit 054ca8fd5e
2 changed files with 63 additions and 6 deletions


@@ -38,6 +38,63 @@ def patch_npytyping():
    dataframe.DataFrameMeta.__module__ = property(new_module_dataframe)
    base_meta_classes.SubscriptableMeta._get_module = new_get_module


def patch_schemaview():
    """
    Patch schemaview to correctly resolve multiple layers of relative imports.

    References:

    Returns:

    """
    from typing import List
    from functools import lru_cache

    from linkml_runtime.utils.schemaview import SchemaView
    from linkml_runtime.linkml_model import SchemaDefinitionName

    @lru_cache()
    def imports_closure(self, imports: bool = True, traverse=True, inject_metadata=True) -> List[SchemaDefinitionName]:
        """
        Return all imports

        :param imports: if False, just return the name of the main schema
        :param traverse: if true, traverse recursively
        :return: all schema names in the transitive reflexive imports closure
        """
        if not imports:
            return [self.schema.name]
        if self.schema_map is None:
            self.schema_map = {self.schema.name: self.schema}
        closure = []
        visited = set()
        todo = [self.schema.name]
        if not traverse:
            return todo
        while len(todo) > 0:
            sn = todo.pop()
            visited.add(sn)
            if sn not in self.schema_map:
                imported_schema = self.load_import(sn)
                self.schema_map[sn] = imported_schema
            s = self.schema_map[sn]
            if sn not in closure:
                closure.append(sn)
            for i in s.imports:
                if sn.startswith('.') and ':' not in i:
                    # this schema was itself a relative import: prepend its relative
                    # directory so the nested relative import resolves against it
                    i = '/'.join(sn.split('/')[:-1]) + '/' + i
                if i not in visited:
                    todo.append(i)
        if inject_metadata:
            for s in self.schema_map.values():
                for x in {**s.classes, **s.enums, **s.slots, **s.subsets, **s.types}.values():
                    x.from_schema = s.id
                for c in s.classes.values():
                    for a in c.attributes.values():
                        a.from_schema = s.id
        return closure

    SchemaView.imports_closure = imports_closure


def apply_patches():
    patch_npytyping()
    patch_schemaview()
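
Not part of the commit: a minimal usage sketch of what the patch enables. It assumes the patches live in a ``monkeypatch`` module of ``nwb_linkml`` (the file name is not shown in this view), and the schema path and import names are made up for illustration.

# usage sketch only -- module path, schema path, and import names are assumptions
from nwb_linkml.monkeypatch import apply_patches  # assumed import path

apply_patches()  # installs the patched SchemaView.imports_closure (and the npytyping patch)

from linkml_runtime.utils.schemaview import SchemaView

# Suppose core.namespace.yaml imports "../hdmf-common/namespace", which itself
# imports "./table". With the patch, that second-level relative import is
# resolved against the importing schema's own directory ("../hdmf-common/table")
# rather than against the root schema, so the closure loads the right file.
sv = SchemaView("core.namespace.yaml")
print(sv.imports_closure())  # all schema names in the transitive import closure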


@@ -7,10 +7,9 @@ an NWB file, we need a bit of infrastructure for generating and caching
pydantic models on the fly.
Relationship to other modules:
- :mod:`.adapters` manage the conversion from NWB schema language to linkML.
- :mod:`.generators` create models like pydantic models from the linkML schema
- :mod:`.providers` then use ``adapters`` and ``generators`` to provide models
from generated schema!
* :mod:`.adapters` manage the conversion from NWB schema language to linkML.
* :mod:`.generators` create models like pydantic models from the linkML schema
* :mod:`.providers` then use ``adapters`` and ``generators`` to provide models from generated schema!
"""
import pdb
from typing import Dict, TypedDict, List, Optional, Literal, TypeVar, Any, Dict
@@ -155,7 +154,7 @@ class LinkMLProvider(Provider):
Like other :class:`.Provider` classes, this model is not a singleton but
behaves a bit like one in that when instantiated without arguments
it is stateless (except for configuration by environment-level variables).
So we don't use ``@classmethod``s here, but instantiating the class should remain
So we don't use ``@classmethod`` s here, but instantiating the class should remain
cheap.
Namespaces can be built from:
@@ -164,6 +163,7 @@ class LinkMLProvider(Provider):
* dictionaries, as are usually packaged in nwb files: :meth:`.build_from_dicts`
All of which feed into...
* :class:`~.adapters.NamespacesAdapter` used throughout the rest of ``nwb_linkml`` - :meth:`.build`
After a namespace is built, it can be accessed using :meth:`.LinkMLProvider.get`, which