testing custom directive for doctests

sneakers-the-rat 2024-07-08 19:07:21 -07:00
parent 0cbf1ac1b2
commit b9ed7c0026
Signed by untrusted user who does not match committer: jonny
GPG key ID: 6DCB96EF1E4D232D
12 changed files with 3212 additions and 37 deletions

0
docs/__init__.py Normal file
View file

docs/conf.py
View file

@@ -13,6 +13,10 @@ author = 'Jonny Saunders'
release = 'v0.1.0'
import os
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent))
from directives import AdapterDirective
from sphinx.util.tags import Tags
tags: Tags
@@ -35,12 +39,11 @@ extensions = [
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.venv']

if os.environ.get('SPHINX_MINIMAL', None) == 'True':
-    exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**/models']
+    exclude_patterns.append('**/models')
    tags.add('minimal')
else:
-    exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
    tags.add('full')
@@ -124,3 +127,8 @@ from nwb_linkml.adapters import BuildResult
todo_include_todos = True
todo_link_only = True
def setup(app):
    AdapterDirective.app = app
    app.add_directive('adapter', AdapterDirective)
    return {'parallel_read_safe': True, 'parallel_write_safe': True}

86
docs/directives.py Normal file
View file

@@ -0,0 +1,86 @@
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.statemachine import StringList
from jinja2 import Environment

from sphinx.util.nodes import nested_parse_with_titles
TEMPLATE = """
.. grid:: 2
    :gutter: 1
    :margin: 0
    :padding: 0

    .. grid-item-card::
        :margin: 0

        NWB Schema
        ^^^

        .. code-block:: yaml

            {{ nwb }}

    .. grid-item-card::
        :margin: 0

        LinkML
        ^^^

        .. code-block:: yaml

            {{ linkml }}
"""
class AdapterDirective(Directive):
    """
    Directive for writing inline adapter doctests with pretty rendering :)

    Based on sphinx-jinja: https://pypi.org/project/sphinx-jinja/
    """

    has_content = True
    optional_arguments = 1
    option_spec = {
        "nwb": directives.unchanged,
        "linkml": directives.unchanged,
    }
    app = None

    def run(self):
        node = nodes.Element()
        node.document = self.state.document
        cxt = {
            'nwb': self.options.get("nwb"),
            'linkml': self.options.get("linkml"),
        }
        template = Environment(
            # **conf.jinja_env_kwargs
        ).from_string(TEMPLATE)
        new_content = template.render(**cxt)
        new_content = StringList(new_content.splitlines(), source='')
        nested_parse_with_titles(self.state, new_content, node)
        return node.children


def debug_print(title: str, content: str) -> None:
    """Print content bracketed by labeled delimiter lines, for debugging."""
    stars = '*' * 10
    print('\n{1} Begin Debug Output: {0} {1}'.format(title, stars))
    print(content)
    print('\n{1} End Debug Output: {0} {1}'.format(title, stars))


def setup(app):
    AdapterDirective.app = app
    app.add_directive('adapter', AdapterDirective)
    # Config values carried over from sphinx-jinja; not read by
    # AdapterDirective itself, but kept for compatibility with the
    # upstream directive this is based on.
    app.add_config_value('jinja_contexts', {}, 'env')
    app.add_config_value('jinja_base', app.srcdir, 'env')
    app.add_config_value('jinja_env_kwargs', {}, 'env')
    app.add_config_value('jinja_filters', {}, 'env')
    app.add_config_value('jinja_tests', {}, 'env')
    app.add_config_value('jinja_globals', {}, 'env')
    app.add_config_value('jinja_policies', {}, 'env')
    return {'parallel_read_safe': True, 'parallel_write_safe': True}
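
For orientation, the directive's rendering step can be exercised outside of Sphinx, since it only interpolates its two YAML option strings into the sphinx-design grid template. A minimal sketch, assuming the module above is importable as directives, with invented payloads:

from jinja2 import Environment

from directives import TEMPLATE  # the module-level template defined above

# run() does the same thing with self.options["nwb"] / self.options["linkml"]
rst_source = Environment().from_string(TEMPLATE).render(
    nwb="datasets:\n- name: MyScalar",       # hypothetical payload
    linkml="attributes:\n- name: MyScalar",  # hypothetical payload
)
print(rst_source)  # RST that run() then parses with nested_parse_with_titles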

3034
docs/pdm.lock Normal file

File diff suppressed because it is too large.

View file

@@ -21,6 +21,7 @@ dependencies = [
    "myst-nb @ git+https://github.com/executablebooks/MyST-NB.git",
    "ipykernel>=6.25.2",
    "ipywidgets>=8.1.1",
+   "sphinx-jinja>=2.0.2",
]
[build-system]
@@ -35,3 +36,5 @@ check-hidden = true
# ignore-words-list = ''
[tool.pdm]
distribution = false

View file

@@ -23,6 +23,7 @@ from linkml_runtime.linkml_model import (
    SlotDefinition,
    TypeDefinition,
)
+from linkml_runtime.dumpers import yaml_dumper
from pydantic import BaseModel

from nwb_schema_language import Attribute, Dataset, Group, Schema
@@ -90,6 +91,23 @@ class BuildResult:
        return out_str

    def as_linkml(self) -> str:
        """
        Render build results as linkml-style YAML.

        Note that only non-schema results will be included, as a schema
        usually contains all the other types.
        """
        output = {}
        for label, alist in (
            ("classes", self.classes),
            ("slots", self.slots),
            ("types", self.types),
        ):
            if not alist:
                continue
            output[label] = {a.name: a for a in alist}
        return yaml_dumper.dumps(output)


class Adapter(BaseModel):
    """Abstract base class for adapters"""

View file

@@ -10,6 +10,7 @@ from linkml_runtime.linkml_model.meta import (
    DimensionExpression,
)

+from nwb_linkml.maps.naming import snake_case
from nwb_linkml.types.nwb import DIMS_TYPE, SHAPE_TYPE
@@ -87,7 +88,7 @@ class ArrayAdapter:
        """
        Create the corresponding array specification from a shape
        """
-       dims = [DimensionExpression(alias=dim.dims, exact_cardinality=dim.shape) for dim in shape]
+       dims = [DimensionExpression(alias=snake_case(dim.dims), exact_cardinality=dim.shape) for dim in shape]
        return ArrayExpression(dimensions=dims)

    def make(self) -> List[ArrayExpression]:
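
To make the renaming concrete, a sketch of the dimension construction with hypothetical shape entries (plain tuples stand in for the real dimension objects):

from linkml_runtime.linkml_model.meta import ArrayExpression, DimensionExpression

from nwb_linkml.maps.naming import snake_case

# (dims, shape) pairs as they might appear in an NWB dataset spec
shape = [("num times", None), ("num DIM2", 3)]  # hypothetical
dims = [DimensionExpression(alias=snake_case(d), exact_cardinality=n) for d, n in shape]
array = ArrayExpression(dimensions=dims)
assert dims[0].alias == "num_times"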

View file

@@ -3,25 +3,46 @@ Adapters to linkML classes
"""

from abc import abstractmethod
-from typing import List, Optional
+from typing import Type, TypeVar, List, Optional

from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from pydantic import field_validator

from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
from nwb_schema_language import CompoundDtype, Dataset, DTypeType, Group, ReferenceDtype

T = TypeVar('T', bound=Type[Dataset] | Type[Group])
TI = TypeVar('TI', bound=Dataset | Group)
class ClassAdapter(Adapter):
    """
    Abstract adapter to class-like things in linkml, holds methods common to
    both DatasetAdapter and GroupAdapter
    """

    TYPE: T
    """
    The type that this adapter class handles
    """

-   cls: Dataset | Group
+   cls: TI
    parent: Optional["ClassAdapter"] = None

    @field_validator('cls', mode='before')
    @classmethod
    def cast_from_string(cls, value: str | TI) -> TI:
        """
        Cast from YAML string to desired class
        """
        if isinstance(value, str):
            from nwb_linkml.io.schema import load_yaml

            value = load_yaml(value)
            value = cls.TYPE(**value)
        return value

    @abstractmethod
    def build(self) -> BuildResult:
        """
View file

@@ -1,12 +1,10 @@
"""
Adapter for NWB datasets to linkml Classes
"""

-import pdb
from abc import abstractmethod
from typing import Optional, Type

from linkml_runtime.linkml_model.meta import (
    ClassDefinition,
    SlotDefinition,
)
@@ -51,37 +49,23 @@ class MapScalar(DatasetMap):

    Examples:

-       .. grid:: 2
-           :gutter: 1
-           :margin: 0
-           :padding: 0
+       .. adapter:: DatasetAdapter
+           :nwb:
+               datasets:
+               - name: MyScalar
+                 doc: A scalar
+                 dtype: int32
+                 quantity: '?'
+           :linkml:
+               attributes:
+               - name: MyScalar
+                 description: A scalar
+                 multivalued: false
+                 range: int32
+                 required: false
-
-           .. grid-item-card::
-               :margin: 0
-
-               NWB Schema
-               ^^^
-
-               .. code-block:: yaml
-
-                   datasets:
-                   - name: MyScalar
-                     doc: A scalar
-                     dtype: int32
-                     quantity: '?'
-
-           .. grid-item-card::
-               :margin: 0
-
-               LinkML
-               ^^^
-
-               .. code-block:: yaml
-
-                   attributes:
-                   - name: MyScalar
-                     description: A scalar
-                     multivalued: false
-                     range: int32
-                     required: false
    """
@@ -463,6 +447,7 @@ class DatasetAdapter(ClassAdapter):
    """
    Orchestrator class for datasets - calls the set of applicable mapping classes
    """

+   TYPE = Dataset
    cls: Dataset
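
Together with as_linkml above, this is roughly the flow the .. adapter:: doctests exercise; a sketch (import path assumed, exact YAML depends on which mapping classes apply):

from nwb_linkml.adapters.dataset import DatasetAdapter  # path assumed

adapter = DatasetAdapter(cls="name: MyScalar\ndoc: A scalar\ndtype: int32\nquantity: '?'")
result = adapter.build()   # BuildResult collecting classes/slots/types
print(result.as_linkml())  # YAML comparable against an :linkml: block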

View file

@@ -16,6 +16,7 @@ class GroupAdapter(ClassAdapter):
    """
    Adapt NWB Groups to LinkML Classes
    """

+   TYPE = Group
    cls: Group

View file

@@ -5,13 +5,29 @@ String manipulation methods for names
"""

import re
from pathlib import Path

-CAMEL_TO_SNAKE = re.compile("((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))")
+CAMEL_TO_SNAKE = re.compile(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))")
"""
Convert camel case to snake case

courtesy of: https://stackoverflow.com/a/12867228
"""


def snake_case(name: str | None) -> str | None:
    """
    Snake caser for replacing all non-word characters with single underscores

    Primarily used when creating dimension labels in
    :class:`~nwb_linkml.adapters.ArrayAdapter`, see also :func:`.camel_to_snake`
    for converting camelcased names.
    """
    if name is None:
        return None

    name = name.strip()
    name = re.sub(r'\W+', '_', name)
    name = name.lower()
    return name


def camel_to_snake(name: str) -> str:
    """

View file

@@ -9,6 +9,8 @@ from sybil.parsers.rest import DocTestParser, PythonCodeBlockParser

from .fixtures import *  # noqa: F403

+# Test adapter generation examples
pytest_collect_file = Sybil(
    parsers=[
        DocTestParser(optionflags=ELLIPSIS + NORMALIZE_WHITESPACE),
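
The hunk is cut off here; for context, a Sybil configuration of this shape typically ends like the following sketch (the patterns value is an assumption, not necessarily the repo's exact config):

from doctest import ELLIPSIS, NORMALIZE_WHITESPACE

from sybil import Sybil
from sybil.parsers.rest import DocTestParser, PythonCodeBlockParser

pytest_collect_file = Sybil(
    parsers=[
        DocTestParser(optionflags=ELLIPSIS + NORMALIZE_WHITESPACE),
        PythonCodeBlockParser(),  # also runs python code-block examples
    ],
    patterns=["*.py"],  # assumed: collect examples from module docstrings
).pytest()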