update models to correctly handle ElementIdentifiers

sneakers-the-rat 2024-09-02 18:46:02 -07:00
parent a6934276d2
commit 97135c14f2
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
31 changed files with 132 additions and 10 deletions


@@ -430,6 +430,10 @@ class MapArrayLikeAttributes(DatasetMap):
The most general case - treat everything that isn't handled by one of the special cases
as an array!
+ We specifically include classes that have no attributes but also don't have a name,
+ as they still require their own class (unlike :class:`.MapArrayLike` above, where we
+ just generate an anonymous slot.)
Examples:
.. adapter:: DatasetAdapter
@@ -525,7 +529,7 @@ class MapArrayLikeAttributes(DatasetMap):
return (
all([cls.dims, cls.shape])
and cls.neurodata_type_inc != "VectorData"
- and has_attrs(cls)
+ and (has_attrs(cls) or not cls.name)
and not is_compound(cls)
and (dtype == "AnyType" or dtype in flat_to_linkml)
)
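
To make the effect of the new condition concrete, here is a minimal, self-contained sketch (simplified stand-in types, not the real adapter classes; the dtype and is_compound checks are omitted): an unnamed array-like dataset with no attributes now matches, while a named one without attributes does not.

```python
# Sketch only: "Dataset" is a stand-in for the nwb_schema_language class, and
# has_attrs is a simplified version of the real helper.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Dataset:
    dims: Optional[list] = None
    shape: Optional[list] = None
    name: Optional[str] = None
    neurodata_type_inc: Optional[str] = None
    attributes: List[str] = field(default_factory=list)


def has_attrs(cls: Dataset) -> bool:
    return len(cls.attributes) > 0


def check(cls: Dataset) -> bool:
    # mirrors the updated condition: unnamed array-likes get their own class
    # even when they declare no attributes
    return (
        all([cls.dims, cls.shape])
        and cls.neurodata_type_inc != "VectorData"
        and (has_attrs(cls) or not cls.name)
    )


unnamed = Dataset(dims=["num_elements"], shape=["null"])            # True
named = Dataset(dims=["num_elements"], shape=["null"], name="ids")  # False
print(check(unnamed), check(named))
```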


@@ -56,6 +56,8 @@ class NamespacesAdapter(Adapter):
needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns)
ns_adapter.imported.append(needed_adapter)
+ ns_adapter.populate_imports()
return ns_adapter
def build(
@@ -176,7 +178,6 @@ class NamespacesAdapter(Adapter):
else:
raise KeyError(f"No schema found that define {name}")
- @model_validator(mode="after")
def populate_imports(self) -> "NamespacesAdapter":
"""
Populate the imports that are needed for each schema file
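
The populate_imports step is no longer a pydantic @model_validator(mode="after"); instead, each construction path (from_yaml above, load_namespace_adapter and LinkMLProvider below) calls it explicitly. A toy illustration of the two styles, not the real adapter code:

```python
from typing import List

from pydantic import BaseModel, model_validator


class WithValidator(BaseModel):
    schemas: List[str] = []
    imports: List[str] = []

    @model_validator(mode="after")
    def populate_imports(self) -> "WithValidator":
        # runs automatically after every construction/validation
        self.imports = [s for s in self.schemas if s.endswith(".extension")]
        return self


class Explicit(BaseModel):
    schemas: List[str] = []
    imports: List[str] = []

    def populate_imports(self) -> "Explicit":
        # runs only when a caller asks for it
        self.imports = [s for s in self.schemas if s.endswith(".extension")]
        return self


a = WithValidator(schemas=["core", "lab.extension"])                # implicit
b = Explicit(schemas=["core", "lab.extension"]).populate_imports()  # explicit
print(a.imports, b.imports)
```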


@@ -131,6 +131,8 @@ def load_namespace_adapter(
else:
adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch)
+ adapter.populate_imports()
return adapter


@@ -127,6 +127,7 @@ class LinkMLProvider(Provider):
for schema_needs in adapter.needed_imports.values():
for needed in schema_needs:
adapter.imported.append(ns_adapters[needed])
+ adapter.populate_imports()
# then do the build
res = {}


@@ -278,7 +278,7 @@ class PydanticProvider(Provider):
nwb_models.models.pydantic.{namespace}.{version}
"""
name_pieces = [
- "nwb_linkml",
+ "nwb_models",
"models",
"pydantic",
module_case(namespace),
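
The generated models now live under nwb_models rather than nwb_linkml. A small sketch of how these pieces join into a dotted module path; module_case/version_module_case here are hypothetical stand-ins assumed to normalize names into importable identifiers, and the version is only an example:

```python
# Hypothetical helpers standing in for the real module_case/version_module_case.
def module_case(name: str) -> str:
    return name.replace("-", "_").replace(".", "_").lower()


def version_module_case(version: str) -> str:
    return "v" + module_case(version)


namespace, version = "core", "2.7.0"  # example values, not taken from the diff
name_pieces = [
    "nwb_models",
    "models",
    "pydantic",
    module_case(namespace),
    version_module_case(version),
]
print(".".join(name_pieces))  # nwb_models.models.pydantic.core.v2_7_0
```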


@@ -131,7 +131,7 @@ class SchemaProvider(Provider):
results = {}
for ns, ns_result in linkml_res.items():
results[ns] = pydantic_provider.build(
- ns_result["namespace"], versions=self.versions, **pydantic_kwargs
+ ns_result.namespace, versions=self.versions, **pydantic_kwargs
)
return results


@@ -866,6 +866,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):
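
With the new value slot, ElementIdentifiers carries the actual id array instead of only a name. A hedged usage sketch; the import path is illustrative (ElementIdentifiers is generated into the hdmf-common table module, and the exact version segment depends on the build):

```python
import numpy as np

# Illustrative import path -- adjust the namespace/version segments to whatever
# the provider actually generated.
from nwb_models.models.pydantic.hdmf_common.v1_8_0.hdmf_common_table import (
    ElementIdentifiers,
)

# name defaults to "element_id"; value now holds the 1-D integer id array
ids = ElementIdentifiers(value=np.arange(5))
print(ids.name, ids.value.shape)  # element_id (5,)

# value is Optional, so an ElementIdentifiers without data is still valid
assert ElementIdentifiers().value is None
```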


@@ -866,6 +866,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -877,6 +877,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -858,6 +858,10 @@ class ElementIdentifiers(Data):
name: str = Field(
"element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
)
+ value: Optional[NDArray[Shape["* num_elements"], int]] = Field(
+ None,
+ json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_elements"}]}}},
+ )
class DynamicTableRegion(DynamicTableRegionMixin, VectorData):


@@ -86,6 +86,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -86,6 +86,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -114,6 +114,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -87,6 +87,12 @@ classes:
ifabsent: string(element_id)
range: string
required: true
+ value:
+ name: value
+ array:
+ dimensions:
+ - alias: num_elements
+ range: int
tree_root: true
DynamicTableRegion:
name: DynamicTableRegion


@@ -171,17 +171,11 @@ def generate_versions(
shutil.rmtree(tmp_dir / "linkml")
shutil.rmtree(tmp_dir / "pydantic")
# import the most recent version of the schemas we built
latest_version = sorted((pydantic_path / "core").glob("v*"), key=os.path.getmtime)[-1]
# make inits to use the schema! we don't usually do this in the
# provider class because we directly import the files there.
with open(pydantic_path / "__init__.py", "w") as initfile:
initfile.write(" ")
with open(pydantic_path / "__init__.py", "w") as initfile:
initfile.write(f"from .pydantic.core.{latest_version.name}.namespace import *")
subprocess.run(["black", "."])
finally:
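
For example, if the most recently built core version directory happened to be v2_7_0 (illustrative; it is picked by getmtime above), the rewritten __init__.py would contain a single star import so the newest models are importable from the package root:

```python
# contents of the generated __init__.py under pydantic_path (version name assumed)
from .pydantic.core.v2_7_0.namespace import *
```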