
Commit 94c11b3
Merge pull request #138 from rjmacarthy/improvement/ollama-openai-api
Update ollama chat to use the OpenAI API specification
rjmacarthy authored Feb 27, 2024
2 parents ef375cc + e6945c9 commit 94c11b3
Showing 5 changed files with 13 additions and 8 deletions.
8 changes: 4 additions & 4 deletions package.json
@@ -247,8 +247,8 @@
     "twinny.chatApiPath": {
       "order": 3,
       "type": "string",
-      "default": "/api/generate",
-      "description": "Endpoint path for chat completions. Defaults to '/api/generate' for Ollama and '/completion' for llama.cpp.",
+      "default": "/v1/chat/completions",
+      "description": "Endpoint path for chat completions.",
       "required": true
     },
     "twinny.chatApiPort": {
@@ -269,7 +269,7 @@
       "order": 6,
       "type": "string",
       "default": "/api/generate",
-      "description": "Endpoint path for FIM completions. Defaults to '/api/generate' for Ollama and '/completion' for llama.cpp.",
+      "description": "Endpoint path for FIM completions.",
       "required": true
     },
     "twinny.chatModelName": {
@@ -385,7 +385,7 @@
     "twinny.enableLogging": {
       "order": 23,
       "type": "boolean",
-      "default": false,
+      "default": true,
       "description": "Enable twinny debug mode"
     }
   }
4 changes: 2 additions & 2 deletions src/common/constants.ts
@@ -100,12 +100,12 @@ export const STOP_STABLECODE = ['<|endoftext|>']
 export const API_PROVIDER: ApiProviders = {
   ollama: {
     fimApiPath: '/api/generate',
-    chatApiPath: '/api/generate',
+    chatApiPath: '/v1/chat/completions',
     port: 11434
   },
   ollamawebui: {
     fimApiPath: '/ollama/api/generate',
-    chatApiPath: '/ollama/api/generate',
+    chatApiPath: '/ollama/v1/chat/completions',
     port: 8080
   },
   llamacpp: {
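For background on the path change: Ollama ships an OpenAI-compatible chat endpoint at /v1/chat/completions, which the updated ollama and ollamawebui provider entries now target. A minimal sketch of a streaming request against it, assuming a local Ollama server on the default port; the model name is only an example, not a project default:

    // Sketch only: a streaming chat request against Ollama's
    // OpenAI-compatible endpoint on the default port (11434).
    async function chatOnce(): Promise<void> {
      const res = await fetch('http://localhost:11434/v1/chat/completions', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: 'codellama:7b-instruct', // illustrative model name
          stream: true,
          messages: [{ role: 'user', content: 'Write a haiku about code.' }]
        })
      })
      console.log(res.status) // 200 when the server accepts the request
    }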
3 changes: 2 additions & 1 deletion src/common/types.ts
@@ -3,7 +3,6 @@ import { CodeLanguageDetails } from './languages'
 import { ALL_BRACKETS } from './constants'
 
 export interface StreamBodyBase {
-  prompt: string
   stream: boolean
   n_predict?: number
   temperature?: number
@@ -12,6 +11,8 @@ export interface StreamBodyBase {
 export interface StreamOptionsOllama extends StreamBodyBase {
   model: string
   keep_alive?: string | number
+  messages?: MessageType[] | MessageRoleContent
+  prompt: string
   options: Record<string, unknown>
 }
1 change: 1 addition & 0 deletions src/extension/model-options.ts
@@ -24,6 +24,7 @@ export function createStreamRequestBody(
     model: options.model,
     prompt,
     stream: true,
+    messages: options.messages,
     keep_alive: options.keepAlive,
     options: {
       temperature: options.temperature,
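Together with the types change above, the new messages field now flows through createStreamRequestBody into the Ollama request body. A sketch of the resulting payload, under the assumption that MessageType is a plain role/content pair (its real definition lives elsewhere in src/common/types.ts); all values are illustrative:

    // Assumed shape of MessageType: a { role, content } pair as in
    // the OpenAI chat format. This restates, not defines, the real type.
    type MessageType = { role: 'system' | 'user' | 'assistant'; content: string }

    const messages: MessageType[] = [
      { role: 'user', content: 'Explain this function.' }
    ]

    // Illustrative Ollama chat body after this change; values are made up.
    const body = {
      model: 'codellama:7b-instruct',
      prompt: '',
      stream: true,
      messages,
      keep_alive: '5m',
      options: { temperature: 0.2 }
    }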
5 changes: 4 additions & 1 deletion src/extension/utils.ts
@@ -122,7 +122,10 @@ export const getChatDataFromProvider = (
 ) => {
   switch (provider) {
     case ApiProviders.Ollama:
-      return data?.response
+    case ApiProviders.OllamaWebUi:
+      return data?.choices[0].delta?.content
+        ? data?.choices[0].delta.content
+        : ''
     case ApiProviders.LlamaCpp:
       return data?.content
     default:
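Chat extraction now follows the OpenAI streaming format for both Ollama providers: each chunk carries its text in choices[0].delta.content rather than a top-level response field, and a chunk with no delta content (typically the final one) yields an empty string. A minimal sketch of the shape being unpacked, with illustrative values:

    // Sketch of one streamed chunk from an OpenAI-compatible chat
    // endpoint; only the fields the new extraction reads are shown.
    const chunk = {
      choices: [{ delta: { content: 'Hello' } }]
    }

    // Mirrors the new extraction: delta.content when present, '' otherwise.
    const text = chunk?.choices[0].delta?.content
      ? chunk.choices[0].delta.content
      : ''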
