Commit 2cdaefb: Fix unit test

humpydonkey committed May 16, 2024
1 parent 8a6b690
Showing 4 changed files with 14 additions and 5 deletions.
9 changes: 8 additions & 1 deletion tests/fixtures.py
@@ -9,7 +9,7 @@
 def openai_llm_mock(request):
     content = request.param
     # Note the path here is adjusted to where OpenAI is used, not where it's defined
-    with patch("vision_agent.llm.llm.wrap_openai") as mock:
+    with patch("vision_agent.llm.llm.OpenAI") as mock:
         # Setup a mock response structure that matches what your code expects
         mock_instance = mock.return_value
         mock_instance.chat.completions.create.return_value = MagicMock(
@@ -18,6 +18,13 @@ def openai_llm_mock(request):
         yield mock_instance


+@pytest.fixture
+def langsmith_wrap_oepnai_mock(request, openai_llm_mock):
+    with patch("vision_agent.llm.llm.wrap_openai") as mock:
+        mock.return_value = openai_llm_mock
+        yield mock
+
+
 @pytest.fixture
 def openai_lmm_mock(request):
     content = request.param
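
The comment in the first hunk points at the classic patch-where-used rule: `from openai import OpenAI` inside vision_agent/llm/llm.py binds the name in that module, so the test must patch vision_agent.llm.llm.OpenAI, not openai.OpenAI. A runnable sketch of the same rule with stand-in names (datetime plays the role of OpenAI here; none of this is part of the commit):

from unittest.mock import MagicMock, patch
from datetime import datetime  # stand-in for `from openai import OpenAI`

def report_year():
    # `datetime` is looked up in *this* module's namespace at call time.
    return datetime.now().year

# Patch the name where it is used (this module), not where it is defined:
with patch(f"{__name__}.datetime") as mock_dt:
    mock_dt.now.return_value = MagicMock(year=1999)
    assert report_year() == 1999
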
7 changes: 4 additions & 3 deletions tests/test_llm.py
@@ -6,14 +6,15 @@
     clip_mock,
     grounding_dino_mock,
     grounding_sam_mock,
+    langsmith_wrap_oepnai_mock,
     openai_llm_mock,
 )


 @pytest.mark.parametrize(
     "openai_llm_mock", ["mocked response"], indirect=["openai_llm_mock"]
 )
-def test_generate_with_mock(openai_llm_mock):  # noqa: F811
+def test_generate_with_mock(openai_llm_mock, langsmith_wrap_oepnai_mock):  # noqa: F811
     llm = OpenAILLM()
     response = llm.generate("test prompt")
     assert response == "mocked response"
@@ -26,7 +27,7 @@ def test_generate_with_mock(openai_llm_mock):  # noqa: F811
 @pytest.mark.parametrize(
     "openai_llm_mock", ["mocked response"], indirect=["openai_llm_mock"]
 )
-def test_chat_with_mock(openai_llm_mock):  # noqa: F811
+def test_chat_with_mock(openai_llm_mock, langsmith_wrap_oepnai_mock):  # noqa: F811
     llm = OpenAILLM()
     response = llm.chat([{"role": "user", "content": "test prompt"}])
     assert response == "mocked response"
@@ -52,7 +53,7 @@ def openai_llm_mock_turbo(openai_llm_mock_2):  # noqa: F811
 @pytest.mark.parametrize(
     "openai_llm_mock", ["mocked response"], indirect=["openai_llm_mock"]
 )
-def test_call_with_mock(openai_llm_mock):  # noqa: F811
+def test_call_with_mock(openai_llm_mock, langsmith_wrap_oepnai_mock):  # noqa: F811
     llm = OpenAILLM()
     response = llm("test prompt")
     assert response == "mocked response"
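
The tests never touch langsmith_wrap_oepnai_mock directly; requesting it as an argument is what activates the wrap_openai patch for the duration of each test, so OpenAILLM receives the mocked client rather than a LangSmith-wrapped real one. A minimal standalone sketch of that side-effect-only fixture pattern (time.sleep is just a stand-in target):

from unittest.mock import patch

import pytest

@pytest.fixture
def sleep_mock():
    # Requested only for its side effect: the patch is active from
    # test setup until teardown.
    with patch("time.sleep") as mock:
        yield mock

def test_runs_without_waiting(sleep_mock):
    import time
    time.sleep(100)  # returns immediately; the real sleep is patched out
    sleep_mock.assert_called_once_with(100)
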
2 changes: 1 addition & 1 deletion vision_agent/agent/vision_agent_v2.py
@@ -4,10 +4,10 @@
 from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, Union

 import pandas as pd
+from langsmith import traceable
 from rich.console import Console
 from rich.syntax import Syntax
 from tabulate import tabulate
-from langsmith import traceable

 from vision_agent.agent import Agent
 from vision_agent.agent.vision_agent_v2_prompt import (
1 change: 1 addition & 0 deletions vision_agent/llm/llm.py
@@ -2,6 +2,7 @@
 import os
 from abc import ABC, abstractmethod
 from typing import Any, Callable, Dict, List, Mapping, Optional, Union, cast

+from langsmith.wrappers import wrap_openai
 from openai import AzureOpenAI, OpenAI
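
This import is the other half of the fixture change: wrap_openai wraps an OpenAI client so its calls are traced by LangSmith, and because the name now lives in vision_agent.llm.llm, that is the namespace the fixtures patch. The constructor body itself is not shown in this diff; a sketch of the assumed shape of the call site:

from langsmith.wrappers import wrap_openai
from openai import OpenAI

# Assumed call site (not shown in this diff): wrapping makes every
# chat.completions.create call emit a LangSmith trace.
client = wrap_openai(OpenAI())
response = client.chat.completions.create(
    model="gpt-4",  # hypothetical model choice, for illustration only
    messages=[{"role": "user", "content": "test prompt"}],
)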
