Skip to content

Commit 4a94bc1

Browse files
committed
fix: ollama support
1 parent 2d2b344 commit 4a94bc1

File tree

3 files changed

+17
-3
lines changed

3 files changed

+17
-3
lines changed

mcpx_py/__main__.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313

1414
from . import Chat
1515
from mcp_run import Client, ClientConfig
16+
from mcpx_pydantic_ai import openai_compatible_model
1617
from .chat import SYSTEM_PROMPT
1718
import pydantic_ai
1819

@@ -111,7 +112,19 @@ async def chat_loop(chat):
111112
async def chat_cmd(client, args):
112113
m = args.model
113114
if args.provider:
114-
m = f"{args.provider}:{m}"
115+
if args.provider == "ollama" or args.provider == "llama":
116+
host = os.environ.get(
117+
f"{args.model.upper()}_HOST",
118+
os.environ.get(
119+
"LLAMA_HOST",
120+
os.environ.get("OLLAMA_HOST", "http://127.0.0.1:11434"),
121+
),
122+
)
123+
if not host.endswith("/v1"):
124+
host += "/v1"
125+
m = openai_compatible_model(host, args.model)
126+
else:
127+
m = f"{args.provider}:{m}"
115128

116129
chat = Chat(
117130
m,

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "mcpx-py"
3-
version = "0.5.0"
3+
version = "0.5.1"
44
description = "An mcp.run client for Python"
55
readme = "README.md"
66
requires-python = ">=3.12"

uv.lock

Lines changed: 2 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)