Skip to content

Commit 4b10dee

Browse files
authored
Structured outputs support with examples (#354)
1 parent e956a33 commit 4b10dee

8 files changed

+355
-18
lines changed

README.md

-3
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,6 @@ See [_types.py](ollama/_types.py) for more information on the response types.
3737

3838
Response streaming can be enabled by setting `stream=True`.
3939

40-
> [!NOTE]
41-
> Streaming Tool/Function calling is not yet supported.
42-
4340
```python
4441
from ollama import chat
4542

examples/README.md

+6
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,12 @@ python3 examples/<example>.py
3030
- [multimodal_generate.py](multimodal_generate.py)
3131

3232

33+
### Structured Outputs - Generate structured outputs with a model
34+
- [structured-outputs.py](structured-outputs.py)
35+
- [async-structured-outputs.py](async-structured-outputs.py)
36+
- [structured-outputs-image.py](structured-outputs-image.py)
37+
38+
3339
### Ollama List - List all downloaded models and their properties
3440
- [list.py](list.py)
3541

examples/async-structured-outputs.py

+32
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
"""Async structured-outputs example: ask a model for JSON that matches a Pydantic schema."""

import asyncio

from pydantic import BaseModel

from ollama import AsyncClient


class FriendInfo(BaseModel):
    # One friend entry in the model's JSON reply.
    name: str
    age: int
    is_available: bool


class FriendList(BaseModel):
    # Top-level response shape: a list of friends.
    friends: list[FriendInfo]


async def main():
    client = AsyncClient()
    chat_response = await client.chat(
        model='llama3.1:8b',
        messages=[{'role': 'user', 'content': 'I have two friends. The first is Ollama 22 years old busy saving the world, and the second is Alonso 23 years old and wants to hang out. Return a list of friends in JSON format'}],
        format=FriendList.model_json_schema(),  # Use Pydantic to generate the schema
        options={'temperature': 0},  # Make responses more deterministic
    )

    # Use Pydantic to validate the response
    validated = FriendList.model_validate_json(chat_response.message.content)
    print(validated)


if __name__ == '__main__':
    asyncio.run(main())

examples/structured-outputs-image.py

+50
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
"""Structured-outputs vision example: describe an image as schema-validated JSON."""

from pathlib import Path
from typing import List, Literal, Optional

from pydantic import BaseModel
from rich import print

from ollama import chat


class Object(BaseModel):
    # A single detected object plus a confidence score.
    name: str
    confidence: float
    attributes: Optional[dict] = None


class ImageDescription(BaseModel):
    # Full structured description the model must produce.
    summary: str
    objects: List[Object]
    scene: str
    colors: List[str]
    time_of_day: Literal['Morning', 'Afternoon', 'Evening', 'Night']
    setting: Literal['Indoor', 'Outdoor', 'Unknown']
    text_content: Optional[str] = None


# Ask the user which image to analyze, then make sure it actually exists.
image_path = Path(input('Enter the path to your image: '))
if not image_path.exists():
    raise FileNotFoundError(f'Image not found at: {image_path}')

# Regular chat call; `format` constrains the reply to the schema above.
response = chat(
    model='llama3.2-vision',
    format=ImageDescription.model_json_schema(),  # Pass in the schema for the response
    messages=[
        {
            'role': 'user',
            'content': 'Analyze this image and return a detailed JSON description including objects, scene, colors and any text detected. If you cannot determine certain details, leave those fields empty.',
            'images': [image_path],
        },
    ],
    options={'temperature': 0},  # Set temperature to 0 for more deterministic output
)

# Convert received content to the schema
image_analysis = ImageDescription.model_validate_json(response.message.content)
print(image_analysis)

examples/structured-outputs.py

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
"""Structured-outputs example: ask a model for JSON that matches a Pydantic schema."""

from pydantic import BaseModel

from ollama import chat


class FriendInfo(BaseModel):
    # One friend entry in the model's JSON reply.
    name: str
    age: int
    is_available: bool


class FriendList(BaseModel):
    # Top-level response shape: a list of friends.
    friends: list[FriendInfo]


# Equivalent hand-written JSON schema, kept for reference:
# schema = {'type': 'object', 'properties': {'friends': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'age': {'type': 'integer'}, 'is_available': {'type': 'boolean'}}, 'required': ['name', 'age', 'is_available']}}}, 'required': ['friends']}
response = chat(
    model='llama3.1:8b',
    messages=[{'role': 'user', 'content': 'I have two friends. The first is Ollama 22 years old busy saving the world, and the second is Alonso 23 years old and wants to hang out. Return a list of friends in JSON format'}],
    format=FriendList.model_json_schema(),  # Use Pydantic to generate the schema or format=schema
    options={'temperature': 0},  # Make responses more deterministic
)

# Use Pydantic to validate the response
friends_response = FriendList.model_validate_json(response.message.content)
print(friends_response)

ollama/_client.py

+14-13
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@
2323

2424
import sys
2525

26+
from pydantic.json_schema import JsonSchemaValue
27+
2628

2729
from ollama._utils import convert_function_to_tool
2830

@@ -186,7 +188,7 @@ def generate(
186188
context: Optional[Sequence[int]] = None,
187189
stream: Literal[False] = False,
188190
raw: bool = False,
189-
format: Optional[Literal['', 'json']] = None,
191+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
190192
images: Optional[Sequence[Union[str, bytes]]] = None,
191193
options: Optional[Union[Mapping[str, Any], Options]] = None,
192194
keep_alive: Optional[Union[float, str]] = None,
@@ -204,7 +206,7 @@ def generate(
204206
context: Optional[Sequence[int]] = None,
205207
stream: Literal[True] = True,
206208
raw: bool = False,
207-
format: Optional[Literal['', 'json']] = None,
209+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
208210
images: Optional[Sequence[Union[str, bytes]]] = None,
209211
options: Optional[Union[Mapping[str, Any], Options]] = None,
210212
keep_alive: Optional[Union[float, str]] = None,
@@ -221,7 +223,7 @@ def generate(
221223
context: Optional[Sequence[int]] = None,
222224
stream: bool = False,
223225
raw: Optional[bool] = None,
224-
format: Optional[Literal['', 'json']] = None,
226+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
225227
images: Optional[Sequence[Union[str, bytes]]] = None,
226228
options: Optional[Union[Mapping[str, Any], Options]] = None,
227229
keep_alive: Optional[Union[float, str]] = None,
@@ -265,7 +267,7 @@ def chat(
265267
*,
266268
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
267269
stream: Literal[False] = False,
268-
format: Optional[Literal['', 'json']] = None,
270+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
269271
options: Optional[Union[Mapping[str, Any], Options]] = None,
270272
keep_alive: Optional[Union[float, str]] = None,
271273
) -> ChatResponse: ...
@@ -278,7 +280,7 @@ def chat(
278280
*,
279281
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
280282
stream: Literal[True] = True,
281-
format: Optional[Literal['', 'json']] = None,
283+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
282284
options: Optional[Union[Mapping[str, Any], Options]] = None,
283285
keep_alive: Optional[Union[float, str]] = None,
284286
) -> Iterator[ChatResponse]: ...
@@ -290,7 +292,7 @@ def chat(
290292
*,
291293
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
292294
stream: bool = False,
293-
format: Optional[Literal['', 'json']] = None,
295+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None,
294296
options: Optional[Union[Mapping[str, Any], Options]] = None,
295297
keep_alive: Optional[Union[float, str]] = None,
296298
) -> Union[ChatResponse, Iterator[ChatResponse]]:
@@ -327,7 +329,6 @@ def add_two_numbers(a: int, b: int) -> int:
327329
328330
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
329331
"""
330-
331332
return self._request(
332333
ChatResponse,
333334
'POST',
@@ -689,7 +690,7 @@ async def generate(
689690
context: Optional[Sequence[int]] = None,
690691
stream: Literal[False] = False,
691692
raw: bool = False,
692-
format: Optional[Literal['', 'json']] = None,
693+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
693694
images: Optional[Sequence[Union[str, bytes]]] = None,
694695
options: Optional[Union[Mapping[str, Any], Options]] = None,
695696
keep_alive: Optional[Union[float, str]] = None,
@@ -707,7 +708,7 @@ async def generate(
707708
context: Optional[Sequence[int]] = None,
708709
stream: Literal[True] = True,
709710
raw: bool = False,
710-
format: Optional[Literal['', 'json']] = None,
711+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
711712
images: Optional[Sequence[Union[str, bytes]]] = None,
712713
options: Optional[Union[Mapping[str, Any], Options]] = None,
713714
keep_alive: Optional[Union[float, str]] = None,
@@ -724,7 +725,7 @@ async def generate(
724725
context: Optional[Sequence[int]] = None,
725726
stream: bool = False,
726727
raw: Optional[bool] = None,
727-
format: Optional[Literal['', 'json']] = None,
728+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
728729
images: Optional[Sequence[Union[str, bytes]]] = None,
729730
options: Optional[Union[Mapping[str, Any], Options]] = None,
730731
keep_alive: Optional[Union[float, str]] = None,
@@ -767,7 +768,7 @@ async def chat(
767768
*,
768769
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
769770
stream: Literal[False] = False,
770-
format: Optional[Literal['', 'json']] = None,
771+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
771772
options: Optional[Union[Mapping[str, Any], Options]] = None,
772773
keep_alive: Optional[Union[float, str]] = None,
773774
) -> ChatResponse: ...
@@ -780,7 +781,7 @@ async def chat(
780781
*,
781782
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
782783
stream: Literal[True] = True,
783-
format: Optional[Literal['', 'json']] = None,
784+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
784785
options: Optional[Union[Mapping[str, Any], Options]] = None,
785786
keep_alive: Optional[Union[float, str]] = None,
786787
) -> AsyncIterator[ChatResponse]: ...
@@ -792,7 +793,7 @@ async def chat(
792793
*,
793794
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
794795
stream: bool = False,
795-
format: Optional[Literal['', 'json']] = None,
796+
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
796797
options: Optional[Union[Mapping[str, Any], Options]] = None,
797798
keep_alive: Optional[Union[float, str]] = None,
798799
) -> Union[ChatResponse, AsyncIterator[ChatResponse]]:

ollama/_types.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from datetime import datetime
55
from typing import Any, Mapping, Optional, Union, Sequence
66

7+
from pydantic.json_schema import JsonSchemaValue
78
from typing_extensions import Annotated, Literal
89

910
from pydantic import (
@@ -150,7 +151,7 @@ class BaseGenerateRequest(BaseStreamableRequest):
150151
options: Optional[Union[Mapping[str, Any], Options]] = None
151152
'Options to use for the request.'
152153

153-
format: Optional[Literal['', 'json']] = None
154+
format: Optional[Union[Literal['json'], JsonSchemaValue]] = None
154155
'Format of the response.'
155156

156157
keep_alive: Optional[Union[float, str]] = None

0 commit comments

Comments
 (0)