Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

roll down parent inheritance recursively #14

Merged
merged 18 commits into from
Oct 1, 2024
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
v0.2.0 of nwb_schema_language - parentization
sneakers-the-rat committed Sep 13, 2024
commit 880352d9a4470bd67b45007d25fe2754388adda7
4 changes: 4 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -46,6 +46,10 @@ jobs:
run: pytest
working-directory: nwb_linkml

- name: Run nwb_schema_language Tests
run: pytest
working-directory: nwb_schema_language

- name: Coveralls Parallel
uses: coverallsapp/github-action@v2.3.0
if: runner.os != 'macOS'
2 changes: 1 addition & 1 deletion nwb_linkml/pyproject.toml
Original file line number Diff line number Diff line change
@@ -12,7 +12,7 @@ dependencies = [
"nwb-models>=0.2.0",
"pyyaml>=6.0",
"linkml-runtime>=1.7.7",
"nwb-schema-language>=0.1.3",
"nwb-schema-language>=0.2.0",
"rich>=13.5.2",
#"linkml>=1.7.10",
"linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml",
4 changes: 4 additions & 0 deletions nwb_linkml/src/nwb_linkml/adapters/adapter.py
Original file line number Diff line number Diff line change
@@ -170,6 +170,10 @@ def walk(
# so skip to avoid combinatoric walking
if key == "imports" and type(input).__name__ == "SchemaAdapter":
continue
# nwb_schema_language objects have a reference to their parent,
# which causes cycles
if key == "parent":
continue
val = getattr(input, key)
yield (key, val)
if isinstance(val, (BaseModel, dict, list)):
20 changes: 11 additions & 9 deletions nwb_linkml/src/nwb_linkml/adapters/group.py
Original file line number Diff line number Diff line change
@@ -29,7 +29,7 @@ def build(self) -> BuildResult:
"""
# Handle container groups with only * quantity unnamed groups
if (
len(self.cls.groups) > 0
self.cls.groups
and not self.cls.links
and all([self._check_if_container(g) for g in self.cls.groups])
): # and \
@@ -38,8 +38,8 @@ def build(self) -> BuildResult:

# handle if we are a terminal container group without making a new class
if (
len(self.cls.groups) == 0
and len(self.cls.datasets) == 0
not self.cls.groups
and not self.cls.datasets
and self.cls.neurodata_type_inc is not None
and self.parent is not None
):
@@ -177,15 +177,17 @@ def build_subclasses(self) -> BuildResult:
# Datasets are simple, they are terminal classes, and all logic
# for creating slots vs. classes is handled by the adapter class
dataset_res = BuildResult()
for dset in self.cls.datasets:
dset_adapter = DatasetAdapter(cls=dset, parent=self)
dataset_res += dset_adapter.build()
if self.cls.datasets:
for dset in self.cls.datasets:
dset_adapter = DatasetAdapter(cls=dset, parent=self)
dataset_res += dset_adapter.build()

group_res = BuildResult()

for group in self.cls.groups:
group_adapter = GroupAdapter(cls=group, parent=self)
group_res += group_adapter.build()
if self.cls.groups:
for group in self.cls.groups:
group_adapter = GroupAdapter(cls=group, parent=self)
group_res += group_adapter.build()

res = dataset_res + group_res

65 changes: 63 additions & 2 deletions nwb_linkml/src/nwb_linkml/adapters/namespaces.py
Original file line number Diff line number Diff line change
@@ -9,11 +9,12 @@
from copy import copy
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
from typing import Dict, Generator, List, Optional

from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import Annotation, SchemaDefinition
from pydantic import Field, model_validator
import networkx as nx

from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.adapters.schema import SchemaAdapter
@@ -31,6 +32,9 @@ class NamespacesAdapter(Adapter):
schemas: List[SchemaAdapter]
imported: List["NamespacesAdapter"] = Field(default_factory=list)

_completed: bool = False
"""whether we have run the :meth:`.complete_namespace` method"""

@classmethod
def from_yaml(cls, path: Path) -> "NamespacesAdapter":
"""
@@ -65,7 +69,7 @@ def from_yaml(cls, path: Path) -> "NamespacesAdapter":
needed_adapter = NamespacesAdapter.from_yaml(needed_source_ns)
ns_adapter.imported.append(needed_adapter)

ns_adapter.populate_imports()
ns_adapter.complete_namespaces()

return ns_adapter

@@ -76,6 +80,9 @@ def build(
Build the NWB namespace to the LinkML Schema
"""

if not self._completed:
self.complete_namespaces()

sch_result = BuildResult()
for sch in self.schemas:
if progress is not None:
@@ -149,6 +156,50 @@ def _populate_schema_namespaces(self) -> None:
break
return self

def complete_namespaces(self):
    """
    Finish the definitions of all contained schema objects.

    Must be invoked once the namespace is loaded and any additional imports
    have been attached. It is deliberately not run automatically on load:
    NWB has no formal dependency-resolution system, so the full set of
    required imports is often unknowable until after this adapter exists.

    :meth:`.build` will invoke this method itself if it has not yet been run.
    """
    # Resolve cross-schema imports first, then flatten inheritance.
    self.populate_imports()
    self._roll_down_inheritance()

    # Depth-first completion of every imported namespace adapter.
    for imported_ns in self.imported:
        imported_ns.complete_namespaces()

    self._completed = True

def _roll_down_inheritance(self) -> None:
    """
    Merge properties inherited from parent classes down into their children.

    nwb-schema-language inheritance doesn't work like normal python inheritance -
    instead of inheriting everything at the 'top level' of a class, it also
    recursively merges all properties from the parent objects.

    NOTE(review): currently a stub — no roll-down is performed yet, so
    :meth:`.complete_namespaces` calls a no-op here.

    References:
        https://github.com/NeurodataWithoutBorders/pynwb/issues/1954
    """
    pass

def inheritance_graph(self) -> nx.DiGraph:
    """
    Make a graph of all ``neurodata_types`` in the namespace and imports such that
    each node contains the group or dataset it describes,
    and has directed edges pointing at all the classes that inherit from it.

    Classes without a ``neurodata_type_def`` are anonymous and cannot be
    inherited from, so they are not added as nodes.

    Returns:
        nx.DiGraph: node per ``neurodata_type_def`` (the class object stored
        in the ``cls`` node attribute), with an edge parent -> child for each
        ``neurodata_type_inc`` relationship.
    """
    g = nx.DiGraph()
    for sch in self.all_schemas():
        # NOTE(review): assumes ``created_classes`` yields nwb_schema_language
        # Group/Dataset objects exposing neurodata_type_def/inc — confirm.
        for cls in sch.created_classes:
            name = getattr(cls, "neurodata_type_def", None)
            if name is None:
                # anonymous class: nothing can inherit from it by name
                continue
            g.add_node(name, cls=cls)
            parent = getattr(cls, "neurodata_type_inc", None)
            if parent is not None:
                # edge points from the parent to the inheriting class
                g.add_edge(parent, name)
    # bug fix: the original built ``g`` but fell through without returning it,
    # so callers always received None despite the ``-> nx.DiGraph`` annotation
    return g

def find_type_source(self, name: str) -> SchemaAdapter:
"""
Given some neurodata_type_inc, find the schema that it's defined in.
@@ -279,3 +330,13 @@ def schema_namespace(self, name: str) -> Optional[str]:
if name in sources:
return ns.name
return None

def all_schemas(self) -> "Generator[SchemaAdapter, None, None]":
    """
    Iterate over all schemas in this namespace and, recursively,
    in all imported namespaces.

    Yields:
        SchemaAdapter: each directly-contained schema, then every schema
        reachable through the ``imported`` adapters.
    """
    yield from self.schemas
    for imported in self.imported:
        # bug fix: recurse via all_schemas() — iterating a NamespacesAdapter
        # (a pydantic model) directly yields (field, value) pairs, not
        # SchemaAdapter objects, and would also miss transitive imports.
        yield from imported.all_schemas()
80 changes: 75 additions & 5 deletions nwb_linkml/tests/test_adapters/test_adapter_namespaces.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import pytest

from nwb_linkml.adapters import SchemaAdapter
from pathlib import Path
from nwb_linkml.adapters import NamespacesAdapter, SchemaAdapter
from nwb_schema_language import Attribute, Group, Namespace, Dataset, Namespaces, Schema, FlatDtype


@pytest.mark.parametrize(
@@ -48,8 +49,7 @@ def test_skip_imports(nwb_core_fixture):
assert all([ns == "core" for ns in namespaces])


@pytest.mark.skip()
def test_populate_inheritance(nwb_core_fixture):
def test_roll_down_inheritance():
"""
Classes should receive and override the properties of their parents
when they have neurodata_type_inc
@@ -59,4 +59,74 @@ def test_populate_inheritance(nwb_core_fixture):
Returns:

"""
pass
parent_cls = Group(
neurodata_type_def="Parent",
doc="parent",
attributes=[
Attribute(name="a", dims=["a", "b"], shape=[1, 2], doc="a", value="a"),
Attribute(name="b", dims=["c", "d"], shape=[3, 4], doc="b", value="b"),
],
datasets=[
Dataset(
name="data",
dims=["a", "b"],
shape=[1, 2],
doc="data",
attributes=[
Attribute(name="c", dtype=FlatDtype.int32, doc="c"),
Attribute(name="d", dtype=FlatDtype.int32, doc="d"),
],
)
],
)
parent_sch = Schema(source="parent.yaml")
parent_ns = Namespaces(
namespaces=[
Namespace(
author="hey",
contact="sup",
name="parent",
doc="a parent",
version="1",
schema=[parent_sch],
)
]
)

child_cls = Group(
neurodata_type_def="Child",
neurodata_type_inc="Parent",
doc="child",
attributes=[Attribute(name="a", doc="a")],
datasets=[
Dataset(
name="data",
doc="data again",
attributes=[Attribute(name="a", doc="c", value="z"), Attribute(name="c", doc="c")],
)
],
)
child_sch = Schema(source="child.yaml")
child_ns = Namespaces(
namespaces=[
Namespace(
author="hey",
contact="sup",
name="child",
doc="a child",
version="1",
schema=[child_sch, Schema(namespace="parent")],
)
]
)

parent_schema_adapter = SchemaAdapter(path=Path("parent.yaml"), groups=[parent_cls])
parent_ns_adapter = NamespacesAdapter(namespaces=parent_ns, schemas=[parent_schema_adapter])
child_schema_adapter = SchemaAdapter(path=Path("child.yaml"), groups=[child_cls])
child_ns_adapter = NamespacesAdapter(
namespaces=child_ns, schemas=[child_schema_adapter], imported=[parent_ns_adapter]
)

child_ns_adapter.complete_namespaces()

child = child_ns_adapter.get("Child")
4 changes: 2 additions & 2 deletions nwb_schema_language/Makefile
Original file line number Diff line number Diff line change
@@ -6,7 +6,7 @@ SHELL := bash
.SUFFIXES:
.SECONDARY:

RUN = poetry run
RUN = pdm run
# get values from about.yaml file
SCHEMA_NAME = $(shell ${SHELL} ./utils/get-value.sh name)
SOURCE_SCHEMA_PATH = $(shell ${SHELL} ./utils/get-value.sh source_schema_path)
@@ -107,7 +107,7 @@ gen-project: $(PYMODEL)
$(RUN) gen-project ${GEN_PARGS} -d $(DEST) $(SOURCE_SCHEMA_PATH) && mv $(DEST)/*.py $(PYMODEL)

gen-pydantic: $(PYMODEL)
$(RUN) gen-pydantic $(SOURCE_SCHEMA_PATH) --pydantic_version 2 > $(PYMODEL)/nwb_schema_pydantic.py
$(RUN) generate_pydantic
$(RUN) run_patches --phase post_generation_pydantic

test: test-schema test-python test-examples
3 changes: 2 additions & 1 deletion nwb_schema_language/pyproject.toml
Original file line number Diff line number Diff line change
@@ -9,7 +9,7 @@ dependencies = [
"linkml-runtime>=1.7.7",
"pydantic>=2.3.0",
]
version = "0.1.3"
version = "0.2.0"
description = "Translation of the nwb-schema-language to LinkML"
readme = "README.md"

@@ -20,6 +20,7 @@ documentation = "https://nwb-linkml.readthedocs.io"

[project.scripts]
run_patches = "nwb_schema_language.patches:main"
generate_pydantic = "nwb_schema_language.generator:generate"

[tool.pdm]
[tool.pdm.dev-dependencies]
4 changes: 2 additions & 2 deletions nwb_schema_language/src/nwb_schema_language/__init__.py
Original file line number Diff line number Diff line change
@@ -22,10 +22,10 @@
DTypeType = Union[List[CompoundDtype], FlatDtype, ReferenceDtype]


except (NameError, RecursionError):
except (NameError, RecursionError) as e:
warnings.warn(
"Error importing pydantic classes, passing because we might be in the process of patching"
" them, but it is likely they are broken and you will be unable to use them!",
f" them, but it is likely they are broken and you will be unable to use them!\n{e}",
stacklevel=1,
)

Loading