Skip to content

Commit

Permalink
update, stop generation and max tokens
Browse files Browse the repository at this point in the history
  • Loading branch information
rjmacarthy committed Jan 21, 2024
1 parent 4b6d90c commit 59724af
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 9 deletions.
12 changes: 12 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,10 @@
"shortTitle": "Enable twinny",
"title": "Enable twinny"
},
{
"command": "twinny.stopGeneration",
"title": "Stop generation"
},
{
"command": "twinny.disable",
"title": "Disable twinny",
Expand Down Expand Up @@ -183,6 +187,14 @@
"twinny.temperature": {
"type": "number",
"default": 1
},
"twinny.numPredictChat": {
"type": "number",
"default": 128
},
"twinny.numPredictFim": {
"type": "number",
"default": -2
}
}
}
Expand Down
5 changes: 4 additions & 1 deletion src/chat-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ export class ChatService {
private _baseUrl = this._config.get('ollamaBaseUrl') as string
private _bearerToken = this._config.get('ollamaApiBearerToken') as string
private _chatModel = this._config.get('chatModelName') as string
private _temperature = this._config.get('temperature') as number
private _numPredictChat = this._config.get('numPredictChat') as number
private _completion = ''
private _port = this._config.get('ollamaApiPort') as string
private _view?: WebviewView
Expand All @@ -34,7 +36,8 @@ export class ChatService {
model: this._chatModel,
prompt,
options: {
temperature: this._config.get('temperature') as number
temperature: this._temperature,
num_predict: this._numPredictChat
}
}

Expand Down
3 changes: 3 additions & 0 deletions src/extension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ export async function activate(context: ExtensionContext) {
sidebarProvider.chatService?.streamTemplateCompletion('explain')
)
}),
commands.registerCommand('twinny.stopGeneration', () => {
completionProvider.destroyStream()
}),
commands.registerCommand('twinny.addTypes', () => {
commands.executeCommand('workbench.view.extension.twinny-sidebar-view')
delayExecution(() =>
Expand Down
31 changes: 23 additions & 8 deletions src/providers/completion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,10 @@ import { getCache, setCache } from '../cache'
import { languages } from '../languages'
import { InlineCompletion, StreamBody } from '../types'
import { RequestOptions } from 'https'
import { ClientRequest } from 'http'

export class CompletionProvider implements InlineCompletionItemProvider {
private _statusBar: StatusBarItem
private statusBar: StatusBarItem
private _debouncer: NodeJS.Timeout | undefined
private _document: TextDocument | undefined
private _config = workspace.getConfiguration('twinny')
Expand All @@ -29,11 +30,13 @@ export class CompletionProvider implements InlineCompletionItemProvider {
private _baseUrl = this._config.get('ollamaBaseUrl') as string
private _port = this._config.get('ollamaApiPort') as number
private _temperature = this._config.get('temperature') as number
private _numPredictFim = this._config.get('numPredictFim') as number
private _useFileContext = this._config.get('useFileContext') as number
private _bearerToken = this._config.get('ollamaApiBearerToken') as number
private _currentReq : ClientRequest | undefined = undefined

constructor(statusBar: StatusBarItem) {
this._statusBar = statusBar
this.statusBar = statusBar
}

private buildStreamRequest(prompt: string) {
Expand All @@ -48,7 +51,7 @@ export class CompletionProvider implements InlineCompletionItemProvider {
prompt,
options: {
temperature: this._temperature,
num_predict: -2,
num_predict: this._numPredictFim || -2
}
}

Expand All @@ -66,6 +69,11 @@ export class CompletionProvider implements InlineCompletionItemProvider {
return { requestOptions, requestBody }
}

// Aborts the in-flight completion request (if any) and resets the
// status-bar icon; invoked by the 'twinny.stopGeneration' command
// registered in extension.ts.
public destroyStream = () => {
  // ClientRequest.destroy() tears down the underlying socket, which
  // ends the streaming response mid-generation.
  this._currentReq?.destroy()
  this.statusBar.text = '🤖'
}

public async provideInlineCompletionItems(
document: TextDocument,
position: Position
Expand Down Expand Up @@ -112,21 +120,27 @@ export class CompletionProvider implements InlineCompletionItemProvider {
try {
let completion = ''
let chunkCount = 0
this._statusBar.text = '🤖'
this._statusBar.text = '$(loading~spin)'
this.statusBar.text = '$(loading~spin)'
this.statusBar.command = 'twinny.stopGeneration'

const { requestBody, requestOptions } =
this.buildStreamRequest(prompt)

streamResponse({
body: requestBody,
options: requestOptions,
onStart: (req) => {
this._currentReq = req
},
onData: (chunk, onDestroy) => {
const json = JSON.parse(chunk)
completion = completion + json.response
chunkCount = chunkCount + 1
if (json.response.match('<EOT>')) {
this._statusBar.text = '🤖'
if (
(chunkCount !== 1 && json.response === '\n') ||
json.response.match('<EOT>')
) {
this.statusBar.text = '🤖'
completion = completion.replace('<EOT>', '')
onDestroy()
resolve(
Expand All @@ -141,7 +155,7 @@ export class CompletionProvider implements InlineCompletionItemProvider {
}
})
} catch (error) {
this._statusBar.text = '$(alert)'
this.statusBar.text = '$(alert)'
return resolve([] as InlineCompletionItem[])
}
}, this._debounceWait as number)
Expand Down Expand Up @@ -332,5 +346,6 @@ export class CompletionProvider implements InlineCompletionItemProvider {
this._temperature = this._config.get('temperature') as number
this._useFileContext = this._config.get('useFileContext') as number
this._fimModel = this._config.get('fimModelName') as string
this._numPredictFim = this._config.get('numPredictFim') as number
}
}

0 comments on commit 59724af

Please sign in to comment.