Commit
Merge pull request #119 from jackschedel/2.1.0
fix read prompt cost no model null check
jackschedel authored Feb 14, 2024
2 parents 0cbb4ad + 3125a93 commit 264b1bf
Showing 3 changed files with 30 additions and 2 deletions.
28 changes: 28 additions & 0 deletions src/App.tsx
@@ -56,6 +56,34 @@ function App() {
setModelDefs(modelDefs);
}

// apparently the migration isn't guaranteed, so here's some terrible code :)
if (!modelDefs || !modelDefs[0]) {
const defaultModelDefs = [
{
name: 'gpt-3',
model: 'gpt-3.5-turbo',
endpoint: 0,
model_max_context: 16385,
model_max_tokens: 4096,
prompt_cost_1000: 0.0005,
completion_cost_1000: 0.0005,
swap_visible: true,
},
{
name: 'gpt-4',
model: 'gpt-4-turbo-preview',
endpoint: 0,
model_max_context: 128000,
model_max_tokens: 4096,
prompt_cost_1000: 0.01,
completion_cost_1000: 0.03,
swap_visible: true,
},
];

setModelDefs(defaultModelDefs);
}

const handleGenerate = () => {
if (useStore.getState().generating) return;
const updatedChats: ChatInterface[] = JSON.parse(
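
The defaults above imply a model-definition shape roughly like the sketch below (field names are taken from the literals in this hunk; the repo's actual type may differ):

// Sketch of the model-definition shape implied by the defaults above.
// Field names come from the literals in this hunk; comments are guesses
// at their meaning, not documentation from the repo.
interface ModelDefSketch {
  name: string;                  // display name shown in the UI
  model: string;                 // upstream model identifier
  endpoint: number;              // presumably an index into the configured endpoints
  model_max_context: number;     // context window, in tokens
  model_max_tokens: number;      // max completion tokens per request
  prompt_cost_1000: number;      // USD per 1,000 prompt tokens
  completion_cost_1000: number;  // USD per 1,000 completion tokens
  swap_visible: boolean;         // presumably whether the model appears in the quick-swap list
}
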
2 changes: 1 addition & 1 deletion src/components/ApiMenu/ApiMenu.tsx
@@ -343,7 +343,7 @@ const ApiMenu = ({
pattern='[0-9]*'
className='text-custom-black p-3 text-sm border-none bg-custom-white rounded-md m-0 w-full mr-0 h-8 focus:outline-none'
placeholder='Prompt Cost*'
- value={modelDef.prompt_cost_1000 || ''}
+ value={modelDef?.prompt_cost_1000 || ''}
onChange={(e) => {
const value = Number(e.target.value);

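
A side note on the fallback in this hunk: value feeds a controlled input, so it has to stay defined. A minimal sketch of the pattern under assumed names (ModelDefSketch and promptCostField are illustrative, not from the repo):

// Illustrative only: mirrors the value={modelDef?.prompt_cost_1000 || ''} pattern.
type ModelDefSketch = { prompt_cost_1000?: number };

function promptCostField(modelDef?: ModelDefSketch): number | string {
  // Optional chaining avoids a crash when no model definition exists, and
  // the || '' keeps the controlled input's value defined, so the field
  // renders empty instead of "undefined".
  return modelDef?.prompt_cost_1000 || '';
}

promptCostField({ prompt_cost_1000: 0.0005 }); // 0.0005
promptCostField(undefined);                    // ''
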
2 changes: 1 addition & 1 deletion src/components/TokenCount/TokenCount.tsx
@@ -25,7 +25,7 @@ const TokenCount = React.memo(() => {
const model = useStore((state) => state.modelDefs[model_num]);

const cost = useMemo(() => {
- if (!model.prompt_cost_1000) {
+ if (!model?.prompt_cost_1000) {
return 0;
}
const price = model.prompt_cost_1000 * (tokenCount / 1000);
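
For reference, a standalone sketch of the null-safe prompt-cost calculation this hunk guards (names and types are assumed, not lifted from the repo):

// Minimal sketch of the guarded prompt-cost calculation (assumed shapes,
// not the repo's actual code).
interface ModelCostSketch {
  prompt_cost_1000?: number; // USD per 1,000 prompt tokens
}

function promptCost(model: ModelCostSketch | undefined, tokenCount: number): number {
  // model?.prompt_cost_1000 is undefined when no model is selected, so the
  // cost falls back to 0 instead of throwing
  // "Cannot read properties of undefined".
  if (!model?.prompt_cost_1000) {
    return 0;
  }
  return model.prompt_cost_1000 * (tokenCount / 1000);
}

// e.g. 2,000 prompt tokens at $0.0005 per 1,000 tokens -> $0.001
promptCost({ prompt_cost_1000: 0.0005 }, 2000); // 0.001
promptCost(undefined, 2000);                    // 0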
