Skip to content

Commit

Permalink
fixed flake8
Browse files Browse the repository at this point in the history
  • Loading branch information
dillonalaird committed Mar 12, 2024
1 parent c7412c8 commit 6540122
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 13 deletions.
3 changes: 0 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,6 @@ log_cli_level = "INFO"
log_cli_format = "%(asctime)s [%(levelname)s] %(message)s (%(filename)s:%(lineno)s)"
log_cli_date_format = "%Y-%m-%d %H:%M:%S"

[tool.flake8]
exclude = "tests/*"

[tool.black]
exclude = '.vscode|.eggs|venv'
line-length = 88 # suggested by black official site
Expand Down
10 changes: 5 additions & 5 deletions tests/test_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
from vision_agent.tools import CLIP
from vision_agent.tools.tools import GroundingDINO

from .fixtures import openai_llm_mock
from .fixtures import openai_llm_mock # noqa: F401


@pytest.mark.parametrize(
"openai_llm_mock", ["mocked response"], indirect=["openai_llm_mock"]
)
def test_generate_with_mock(openai_llm_mock):
def test_generate_with_mock(openai_llm_mock):  # noqa: F811
    """OpenAILLM.generate should pass the prompt through and return the mocked completion."""
    model = OpenAILLM()
    assert model.generate("test prompt") == "mocked response"
Expand All @@ -25,7 +25,7 @@ def test_generate_with_mock(openai_llm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_llm_mock"],
)
def test_generate_classifier(openai_llm_mock):
def test_generate_classifier(openai_llm_mock): # noqa: F811
llm = OpenAILLM()
prompt = "Can you generate a cat classifier?"
classifier = llm.generate_classifier(prompt)
Expand All @@ -38,7 +38,7 @@ def test_generate_classifier(openai_llm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_llm_mock"],
)
def test_generate_detector(openai_llm_mock):
def test_generate_detector(openai_llm_mock): # noqa: F811
llm = OpenAILLM()
prompt = "Can you generate a cat detector?"
detector = llm.generate_detector(prompt)
Expand All @@ -51,7 +51,7 @@ def test_generate_detector(openai_llm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_llm_mock"],
)
def test_generate_segmentor(openai_llm_mock):
def test_generate_segmentor(openai_llm_mock): # noqa: F811
llm = OpenAILLM()
prompt = "Can you generate a cat segmentor?"
segmentor = llm.generate_detector(prompt)
Expand Down
10 changes: 5 additions & 5 deletions tests/test_lmm.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from vision_agent.lmm.lmm import OpenAILMM
from vision_agent.tools import CLIP, GroundingDINO, GroundingSAM

from .fixtures import openai_lmm_mock
from .fixtures import openai_lmm_mock # noqa: F401


def create_temp_image(image_format="jpeg"):
Expand All @@ -20,7 +20,7 @@ def create_temp_image(image_format="jpeg"):
@pytest.mark.parametrize(
"openai_lmm_mock", ["mocked response"], indirect=["openai_lmm_mock"]
)
def test_generate_with_mock(openai_lmm_mock):
def test_generate_with_mock(openai_lmm_mock): # noqa: F811
temp_image = create_temp_image()
lmm = OpenAILMM()
response = lmm.generate("test prompt", image=temp_image)
Expand All @@ -38,7 +38,7 @@ def test_generate_with_mock(openai_lmm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_lmm_mock"],
)
def test_generate_classifier(openai_lmm_mock):
def test_generate_classifier(openai_lmm_mock): # noqa: F811
lmm = OpenAILMM()
prompt = "Can you generate a cat classifier?"
classifier = lmm.generate_classifier(prompt)
Expand All @@ -51,7 +51,7 @@ def test_generate_classifier(openai_lmm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_lmm_mock"],
)
def test_generate_classifier(openai_lmm_mock):
def test_generate_classifier(openai_lmm_mock): # noqa: F811
lmm = OpenAILMM()
prompt = "Can you generate a cat classifier?"
detector = lmm.generate_detector(prompt)
Expand All @@ -64,7 +64,7 @@ def test_generate_classifier(openai_lmm_mock):
['{"Parameters": {"prompt": "cat"}}'],
indirect=["openai_lmm_mock"],
)
def test_generate_classifier(openai_lmm_mock):
def test_generate_classifier(openai_lmm_mock): # noqa: F811
lmm = OpenAILMM()
prompt = "Can you generate a cat classifier?"
segmentor = lmm.generate_segmentor(prompt)
Expand Down

0 comments on commit 6540122

Please sign in to comment.