lint
sneakers-the-rat committed Jul 9, 2024
1 parent f4d397c commit b6af8c9
Showing 8 changed files with 31 additions and 23 deletions.
nwb_linkml/conftest.py (27 changes: 17 additions & 10 deletions)
@@ -1,11 +1,17 @@
"""
Test fixtures primarily for doctests for adapters
"""

import re
import textwrap
-from doctest import NORMALIZE_WHITESPACE, ELLIPSIS
-from sybil import Document
-from sybil import Sybil, Region
+from doctest import ELLIPSIS, NORMALIZE_WHITESPACE
+from typing import Generator
+
+import yaml
+from sybil import Document, Example, Region, Sybil
from sybil.parsers.codeblock import PythonCodeBlockParser
from sybil.parsers.doctest import DocTestParser
-import yaml

from nwb_linkml import adapters

# Test adapter generation examples
@@ -24,7 +30,7 @@ def _strip_nwb(nwb: str) -> str:
return nwb


-def test_adapter_block(example):
+def test_adapter_block(example: Example) -> None:
"""
The linkml generated from a nwb example input should match
that provided in the docstring.
@@ -44,10 +50,13 @@ def test_adapter_block(example):
assert generated == expected


-def parse_adapter_blocks(document: Document):
+def parse_adapter_blocks(document: Document) -> Generator[Region, None, None]:
"""
Parse blocks with adapter directives, yield to test with :func:`.test_adapter_block`
"""
for start_match, end_match, source in document.find_region_sources(ADAPTER_START, ADAPTER_END):
# parse
-sections = re.split(r":\w+?:", source, re.MULTILINE)
+sections = re.split(r":\w+?:", source, flags=re.MULTILINE)
sections = [textwrap.dedent(section).strip() for section in sections]

sections[1] = _strip_nwb(sections[1])
@@ -56,9 +65,7 @@ def parse_adapter_blocks(document: Document):


adapter_parser = Sybil(
-parsers=[
-parse_adapter_blocks
-],
+parsers=[parse_adapter_blocks],
patterns=["adapters/*.py"],
)
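For context, a Sybil instance like this is normally hooked into pytest through Sybil's pytest() integration. A minimal sketch; the actual conftest.py may combine it with the DocTestParser / PythonCodeBlockParser collectors imported above:

# sketch: expose the adapter examples to pytest collection
pytest_collect_file = adapter_parser.pytest()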

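A note on the re.split change above: the third positional argument of re.split() is maxsplit, not flags, so the old call passed re.MULTILINE as a split limit and never actually enabled multiline matching. A small sketch of the difference, using a hypothetical source string shaped like the adapter docstring blocks:

import re

source = ":nwb:\n  datasets: ...\n:linkml:\n  classes: ..."

# Old form: re.MULTILINE lands in the maxsplit slot, capping the number of
# splits instead of changing how the pattern matches.
old = re.split(r":\w+?:", source, re.MULTILINE)

# New form: the keyword applies it as a regex flag, as intended.
new = re.split(r":\w+?:", source, flags=re.MULTILINE)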
nwb_linkml/src/nwb_linkml/adapters/adapter.py (2 changes: 1 addition & 1 deletion)
@@ -16,14 +16,14 @@
Union,
)

+from linkml_runtime.dumpers import yaml_dumper
from linkml_runtime.linkml_model import (
ClassDefinition,
Definition,
SchemaDefinition,
SlotDefinition,
TypeDefinition,
)
-from linkml_runtime.dumpers import yaml_dumper
from pydantic import BaseModel

from nwb_schema_language import Attribute, Dataset, Group, Schema
nwb_linkml/src/nwb_linkml/adapters/classes.py (5 changes: 2 additions & 3 deletions)
@@ -3,16 +3,15 @@
"""

from abc import abstractmethod
-from typing import Type, TypeVar, List, Optional
+from typing import List, Optional, Type, TypeVar

from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition
from pydantic import field_validator


from nwb_linkml.adapters.adapter import Adapter, BuildResult
from nwb_linkml.maps import QUANTITY_MAP
from nwb_linkml.maps.naming import camel_to_snake
-from nwb_schema_language import CompoundDtype, Dataset, DTypeType, Group, ReferenceDtype, FlatDtype
+from nwb_schema_language import CompoundDtype, Dataset, DTypeType, FlatDtype, Group, ReferenceDtype

T = TypeVar("T", bound=Type[Dataset] | Type[Group])
TI = TypeVar("TI", bound=Dataset | Group)
nwb_linkml/src/nwb_linkml/adapters/dataset.py (14 changes: 8 additions & 6 deletions)
@@ -1,6 +1,7 @@
"""
Adapter for NWB datasets to linkml Classes
"""

from abc import abstractmethod
from typing import ClassVar, Optional, Type

@@ -14,7 +15,7 @@
from nwb_linkml.maps import QUANTITY_MAP, Map
from nwb_linkml.maps.dtype import flat_to_linkml
from nwb_linkml.maps.naming import camel_to_snake
-from nwb_schema_language import Dataset, CompoundDtype
+from nwb_schema_language import CompoundDtype, Dataset


class DatasetMap(Map):
@@ -141,9 +142,9 @@ class MapScalarAttributes(DatasetMap):
:linkml:
classes:
- name: starting_time
-description: Timestamp of the first sample in seconds. When timestamps are uniformly
-spaced, the timestamp of the first sample can be specified and all subsequent
-ones calculated from the sampling rate attribute.
+description: Timestamp of the first sample in seconds. When timestamps are
+uniformly spaced, the timestamp of the first sample can be specified and all
+subsequent ones calculated from the sampling rate attribute.
attributes:
name:
name: name
@@ -328,8 +329,8 @@ class MapArraylike(DatasetMap):
- null
- null
- null
-doc: Binary data representing images across frames. If data are stored in an external
-file, this should be an empty 3D array.
+doc: Binary data representing images across frames. If data are stored in an
+external file, this should be an empty 3D array.
:linkml:
slots:
- name: data
@@ -754,6 +755,7 @@ def is_1d(cls: Dataset) -> bool:


def is_compound(cls: Dataset) -> bool:
"""Check if dataset has a compound dtype"""
return (
isinstance(cls.dtype, list)
and len(cls.dtype) > 0
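The new one-line docstring on is_compound() names the check the function performs: a dataset's dtype is either a single flat or reference dtype, or a list of named compound fields. A minimal sketch of the two shapes, written as hypothetical plain-dict specs rather than actual nwb_schema_language model instances:

# Flat dtype: a single scalar type name; is_compound is False for a Dataset like this.
flat_dataset = {"name": "data", "doc": "plain numeric data", "dtype": "float64"}

# Compound dtype: a list of named fields, each with its own dtype; is_compound
# is True for a Dataset whose dtype is a list like this.
compound_dataset = {
    "name": "trials",
    "doc": "table-like data",
    "dtype": [
        {"name": "start_time", "doc": "trial start", "dtype": "float64"},
        {"name": "stop_time", "doc": "trial stop", "dtype": "float64"},
    ],
}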
nwb_linkml/tests/fixtures.py (2 changes: 1 addition & 1 deletion)
@@ -36,7 +36,7 @@ def tmp_output_dir() -> Path:
path = Path(__file__).parent.resolve() / "__tmp__"
if path.exists():
for subdir in path.iterdir():
-if subdir.name == 'git':
+if subdir.name == "git":
# don't wipe out git repos every time, they don't rly change
continue
elif subdir.is_file() and subdir.parent != path:
nwb_linkml/tests/test_adapters/test_adapter_dataset.py (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
import pytest
-from nwb_linkml.adapters.dataset import MapScalar

+from nwb_linkml.adapters.dataset import MapScalar
from nwb_schema_language import Dataset


@@ -265,4 +265,3 @@ class Dataset(DtypeMixin):
dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(
default_factory=list
)

pyproject.toml (1 change: 1 addition & 0 deletions)
@@ -24,6 +24,7 @@ target-version = "py311"
include = ["nwb_linkml/**/*.py", "nwb_schema_language/src/**/*.py", "pyproject.toml"]
exclude = [
"docs",
"nwb_linkml/src/nwb_linkml/models/**/*.py",
"nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_language.py",
"nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py",
"tests/__tmp__/**/*"
