Skip to content

Commit

Permalink
Merge pull request #12 from VfBfoerst/master
Browse files Browse the repository at this point in the history
Added options for a remote Ollama API
  • Loading branch information
rjmacarthy authored Dec 27, 2023
2 parents a98d34a + 8697133 commit 5dbcec0
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 2 deletions.
8 changes: 8 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,14 @@
"twinny.ollamaModelName": {
"type": "string",
"default": "codellama:7b-code"
},
"twinny.ollamaBaseUrl": {
"type": "string",
"default": "localhost"
},
"twinny.ollamaApiPort": {
"type": "number",
"default": 11434
}
}
}
Expand Down
6 changes: 4 additions & 2 deletions src/completion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ export class CompletionProvider implements InlineCompletionItemProvider {
private _debounceWait = this._config.get('debounceWait') as number
private _contextLength = this._config.get('contextLength') as number
private _model = this._config.get('ollamaModelName') as string
private _baseurl = this._config.get('ollamaBaseUrl') as string
private _apiport = this._config.get('ollamaApiPort') as number

constructor(statusBar: StatusBarItem) {
this._statusBar = statusBar
Expand Down Expand Up @@ -68,8 +70,8 @@ export class CompletionProvider implements InlineCompletionItemProvider {
this._statusBar.text = '$(loading~spin)'
streamResponse(
{
hostname: 'localhost',
port: 11434,
hostname: this._baseurl,
port: this._apiport,
method: 'POST',
path: '/api/generate'
},
Expand Down

0 comments on commit 5dbcec0

Please sign in to comment.