Skip to content

Commit

Permalink
development -> main (#316)
Browse files Browse the repository at this point in the history
* Fix LMStudio and Oobabooga max_tokens in request body (#311)

OpenAI-like APIs expect the 'max_tokens' parameter

* Add FIM template for Codestral-22B (#312)

* started adding codestral

* final additions for codestral

* 3.16.5

---------

Co-authored-by: AndrewRocky <[email protected]>
  • Loading branch information
rjmacarthy and AndrewRocky authored Sep 18, 2024
1 parent 391985f commit 11ce1c2
Show file tree
Hide file tree
Showing 5 changed files with 40 additions and 6 deletions.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "twinny",
"displayName": "twinny - AI Code Completion and Chat",
"description": "Locally hosted AI code completion plugin for vscode",
"version": "3.16.4",
"version": "3.16.5",
"icon": "assets/icon.png",
"keywords": [
"code-inference",
Expand Down
3 changes: 3 additions & 0 deletions src/common/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ export const FIM_TEMPLATE_FORMAT = {
codegemma: 'codegemma',
codellama: 'codellama',
codeqwen: 'codeqwen',
codestral: 'codestral',
custom: 'custom-template',
deepseek: 'deepseek',
llama: 'llama',
Expand All @@ -216,6 +217,8 @@ export const STOP_STARCODER = ['<|endoftext|>', '<file_sep>']

export const STOP_CODEGEMMA = ['<|file_separator|>', '<|end_of_turn|>', '<eos>']

export const STOP_CODESTRAL = ['[PREFIX]', '[SUFFIX]']

export const DEFAULT_TEMPLATE_NAMES = defaultTemplates.map(({ name }) => name)

export const DEFAULT_ACTION_TEMPLATES = [
Expand Down
33 changes: 32 additions & 1 deletion src/extension/fim-templates.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ import {
STOP_DEEPSEEK,
STOP_LLAMA,
STOP_STARCODER,
STOP_CODEGEMMA
STOP_CODEGEMMA,
STOP_CODESTRAL
} from '../common/constants'
import { supportedLanguages } from '../common/languages'
import { FimPromptTemplate } from '../common/types'
Expand Down Expand Up @@ -61,6 +62,23 @@ export const getFimPromptTemplateDeepseek = ({
return `<|fim▁begin|>${fileContext}\n${heading}${prefix}<|fim▁hole|>${suffix}<|fim▁end|>`
}

/**
 * Builds a fill-in-the-middle prompt in the Codestral format.
 *
 * Codestral places the code after the cursor first, introduced by the
 * `[SUFFIX]` token, then the `[PREFIX]` token followed by the code before
 * the cursor; the model generates the completion after `[PREFIX]`.
 * Any surrounding file context (when enabled) is prepended ahead of the
 * suffix/prefix pair.
 *
 * @param args - FIM prompt inputs: cursor prefix/suffix, optional file
 *   context, header and language metadata.
 * @returns The fully formatted Codestral FIM prompt string.
 */
export const getFimPromptTemplateCodestral = (args: FimPromptTemplate) => {
  const { prefix, suffix } = args.prefixSuffix
  // getFileContext yields an empty context/heading when file context is
  // disabled, so the template below is safe to build unconditionally.
  const { fileContext, heading } = getFileContext(
    args.fileContextEnabled,
    args.context,
    args.language,
    args.header
  )
  return `${fileContext}\n\n[SUFFIX]${suffix}[PREFIX]${heading}${prefix}`
}

export const getFimPromptTemplateOther = ({
context,
header,
Expand Down Expand Up @@ -90,6 +108,10 @@ function getFimTemplateAuto(fimModel: string, args: FimPromptTemplate) {
return getFimPromptTemplateDeepseek(args)
}

if (fimModel.includes(FIM_TEMPLATE_FORMAT.codestral)) {
return getFimPromptTemplateCodestral(args)
}

if (
fimModel.includes(FIM_TEMPLATE_FORMAT.stableCode) ||
fimModel.includes(FIM_TEMPLATE_FORMAT.starcoder) ||
Expand All @@ -111,6 +133,10 @@ function getFimTemplateChosen(format: string, args: FimPromptTemplate) {
return getFimPromptTemplateDeepseek(args)
}

if (format === FIM_TEMPLATE_FORMAT.codestral) {
return getFimPromptTemplateCodestral(args)
}

if (
format === FIM_TEMPLATE_FORMAT.stableCode ||
format === FIM_TEMPLATE_FORMAT.starcoder ||
Expand Down Expand Up @@ -157,6 +183,10 @@ export const getStopWordsAuto = (fimModel: string) => {
return STOP_CODEGEMMA
}

if (fimModel.includes(FIM_TEMPLATE_FORMAT.codestral)) {
return STOP_CODESTRAL
}

return STOP_LLAMA
}

Expand All @@ -169,6 +199,7 @@ export const getStopWordsChosen = (format: string) => {
)
return STOP_STARCODER
if (format === FIM_TEMPLATE_FORMAT.codegemma) return STOP_CODEGEMMA
if (format === FIM_TEMPLATE_FORMAT.codestral) return STOP_CODESTRAL
return STOP_LLAMA
}

Expand Down
4 changes: 2 additions & 2 deletions src/extension/provider-options.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,15 +76,15 @@ export function createStreamRequestBodyFim(
prompt,
stream: true,
temperature: options.temperature,
n_predict: options.numPredictFim
max_tokens: options.numPredictFim
}
case apiProviders.LlamaCpp:
case apiProviders.Oobabooga:
return {
prompt,
stream: true,
temperature: options.temperature,
n_predict: options.numPredictFim
max_tokens: options.numPredictFim
}
case apiProviders.LiteLLM:
return {
Expand Down

0 comments on commit 11ce1c2

Please sign in to comment.