Commit a943e98

fixup! fix LLMChoice with improved openai schema
kyriediculous committed Jan 10, 2025
1 parent 807f477 commit a943e98
Showing 5 changed files with 490 additions and 33 deletions.
runner/app/routes/llm.py (7 changes: 4 additions & 3 deletions)
@@ -19,18 +19,19 @@
     status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": HTTPError},
 }
 
 
 @router.post(
     "/llm",
-    response_model=LLMResponse,
+    response_model=LLMResponse
+    ,
     responses=RESPONSES,
     operation_id="genLLM",
     description="Generate text using a language model.",
     summary="LLM",
     tags=["generate"],
     openapi_extra={"x-speakeasy-name-override": "llm"},
 )
-@router.post("/llm/", response_model=LLMResponse, responses=RESPONSES, include_in_schema=False)
+@router.post("/llm/", response_model=LLMResponse
+             , responses=RESPONSES, include_in_schema=False)
 async def llm(
     request: LLMRequest,
     pipeline: Pipeline = Depends(get_pipeline),
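The stacked decorators register a single handler for both `/llm` and `/llm/`, and `include_in_schema=False` keeps the trailing-slash variant out of the generated OpenAPI document. A minimal sketch of that pattern, with a stubbed response model and handler body standing in for the repo's real ones:

```python
from fastapi import APIRouter, FastAPI
from pydantic import BaseModel

router = APIRouter()


class LLMResponse(BaseModel):
    # Stub; the real model lives in runner/app/routes/utils.py.
    id: str


@router.post("/llm", response_model=LLMResponse)
@router.post("/llm/", response_model=LLMResponse, include_in_schema=False)
async def llm() -> LLMResponse:
    # Both paths dispatch to this one coroutine; only /llm appears in the schema.
    return LLMResponse(id="demo")


app = FastAPI()
app.include_router(router)
```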
runner/app/routes/utils.py (26 changes: 20 additions & 6 deletions)
@@ -77,25 +77,39 @@ class LLMMessage(BaseModel):
     content: str
 
 
-class LLMChoice(BaseModel):
-    delta: Optional[LLMMessage]
-    message: Optional[LLMMessage]
+class LLMBaseChoice(BaseModel):
     index: int
-    finish_reason: Optional[str]
+    finish_reason: str = ""  # Needs OpenAPI 3.1 support to make optional
 
 
 class LLMTokenUsage(BaseModel):
     prompt_tokens: int
     completion_tokens: int
     total_tokens: int
 
+class LLMChoice(LLMBaseChoice):
+    delta: LLMMessage = None
+    message: LLMMessage = None
 
 class LLMResponse(BaseModel):
-    choices: List[LLMChoice]
-    tokens_used: LLMTokenUsage
     id: str
     model: str
     created: int
+    tokens_used: LLMTokenUsage
+    choices: List[LLMChoice]
+
+
+# class LLMStreamChoice(LLMBaseChoice):
+#     delta: LLMMessage
+
+# class LLMNonStreamChoice(LLMBaseChoice):
+#     message: LLMMessage
+
+# class LLMStreamResponse(LLMBaseResponse):
+#     choices: List[LLMStreamChoice]
+
+# class LLMNonStreamResponse(LLMBaseResponse):
+#     choices: List[LLMNonStreamChoice]
 
 
 class LLMRequest(BaseModel):
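Read outside the diff, the refactor moves the shared `index`/`finish_reason` fields into `LLMBaseChoice` and lets a single `LLMChoice` carry either a streaming `delta` or a full `message`, with `finish_reason` defaulting to an empty string until OpenAPI 3.1 support allows a true optional. A self-contained sketch of the resulting models with example payloads; the `Optional[...]` annotations, model name, timestamp, and token counts are illustrative rather than taken from the repo:

```python
from typing import List, Optional

from pydantic import BaseModel


class LLMMessage(BaseModel):
    role: str
    content: str


class LLMBaseChoice(BaseModel):
    index: int
    finish_reason: str = ""  # Needs OpenAPI 3.1 support to make optional


class LLMTokenUsage(BaseModel):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int


class LLMChoice(LLMBaseChoice):
    # Streaming chunks populate delta; non-streaming responses populate message.
    delta: Optional[LLMMessage] = None
    message: Optional[LLMMessage] = None


class LLMResponse(BaseModel):
    id: str
    model: str
    created: int
    tokens_used: LLMTokenUsage
    choices: List[LLMChoice]


# A streaming chunk only carries a delta and an empty finish_reason ...
chunk = LLMChoice(index=0, delta=LLMMessage(role="assistant", content="Hel"))

# ... while the final response carries a full message and usage stats.
final = LLMResponse(
    id="chatcmpl-123",
    model="example-llm",
    created=1736467200,
    tokens_used=LLMTokenUsage(prompt_tokens=5, completion_tokens=7, total_tokens=12),
    choices=[
        LLMChoice(
            index=0,
            finish_reason="stop",
            message=LLMMessage(role="assistant", content="Hello!"),
        )
    ],
)
print(final.model_dump_json(indent=2))  # Pydantic v2; use final.json() on v1
```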
runner/gateway.openapi.yaml (25 changes: 13 additions & 12 deletions)
@@ -868,18 +868,19 @@ components:
       description: Response model for text generation.
     LLMChoice:
       properties:
-        delta:
-          $ref: '#/components/schemas/LLMMessage'
         index:
           type: integer
           title: Index
         finish_reason:
           type: string
           title: Finish Reason
+          default: ''
+        delta:
+          $ref: '#/components/schemas/LLMMessage'
         message:
           $ref: '#/components/schemas/LLMMessage'
       type: object
       required:
-      - delta
       - index
       title: LLMChoice
     LLMMessage:
@@ -932,13 +933,6 @@
       title: LLMRequest
     LLMResponse:
       properties:
-        choices:
-          items:
-            $ref: '#/components/schemas/LLMChoice'
-          type: array
-          title: Choices
-        tokens_used:
-          $ref: '#/components/schemas/LLMTokenUsage'
         id:
           type: string
           title: Id
@@ -948,13 +942,20 @@
         created:
           type: integer
           title: Created
+        tokens_used:
+          $ref: '#/components/schemas/LLMTokenUsage'
+        choices:
+          items:
+            $ref: '#/components/schemas/LLMChoice'
+          type: array
+          title: Choices
       type: object
       required:
-      - choices
-      - tokens_used
       - id
       - model
       - created
+      - tokens_used
+      - choices
       title: LLMResponse
     LLMTokenUsage:
       properties:
runner/openapi.yaml (25 changes: 13 additions & 12 deletions)
@@ -1014,18 +1014,19 @@ components:
       description: Response model for text generation.
     LLMChoice:
       properties:
-        delta:
-          $ref: '#/components/schemas/LLMMessage'
         index:
           type: integer
           title: Index
         finish_reason:
           type: string
           title: Finish Reason
+          default: ''
+        delta:
+          $ref: '#/components/schemas/LLMMessage'
         message:
           $ref: '#/components/schemas/LLMMessage'
       type: object
       required:
-      - delta
       - index
       title: LLMChoice
     LLMMessage:
@@ -1078,13 +1079,6 @@
       title: LLMRequest
     LLMResponse:
       properties:
-        choices:
-          items:
-            $ref: '#/components/schemas/LLMChoice'
-          type: array
-          title: Choices
-        tokens_used:
-          $ref: '#/components/schemas/LLMTokenUsage'
         id:
           type: string
           title: Id
@@ -1094,13 +1088,20 @@
         created:
           type: integer
           title: Created
+        tokens_used:
+          $ref: '#/components/schemas/LLMTokenUsage'
+        choices:
+          items:
+            $ref: '#/components/schemas/LLMChoice'
+          type: array
+          title: Choices
       type: object
       required:
-      - choices
-      - tokens_used
       - id
       - model
       - created
+      - tokens_used
+      - choices
       title: LLMResponse
     LLMTokenUsage:
       properties:
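For API consumers, the practical effect of the regenerated schemas is that `delta` is no longer a required property of `LLMChoice` and `finish_reason` defaults to an empty string, so a client should branch on whichever of `delta`/`message` is present. A hedged sketch against a made-up payload shaped like the updated `LLMResponse`:

```python
# Illustrative payload shaped like the updated LLMResponse schema; values are made up.
payload = {
    "id": "chatcmpl-123",
    "model": "example-llm",
    "created": 1736467200,
    "tokens_used": {"prompt_tokens": 5, "completion_tokens": 7, "total_tokens": 12},
    "choices": [
        {
            "index": 0,
            "finish_reason": "stop",
            "message": {"role": "assistant", "content": "Hello!"},
        }
    ],
}

for choice in payload["choices"]:
    # Streaming chunks carry "delta"; non-streaming responses carry "message".
    part = choice.get("delta") or choice.get("message")
    finished = choice.get("finish_reason", "") != ""
    print(part["content"], "(finished)" if finished else "")
```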
