Add GPT-4 support (#25)
* mobile ui updates

* fixes sidebar btn

* return if null

* mobile input blur

* handle mobile enter key

* new convo name

* new delete mechanism

* test height

* revert

* change padding

* remove overflow

* check relative

* padding

* done

* retry

* test

* test

* should work now

* test

* test

* more

* max h

* revert

* done
mckaywrigley committed Mar 20, 2023
1 parent 9a48248 commit 7810a3e
Showing 8 changed files with 197 additions and 45 deletions.
25 changes: 13 additions & 12 deletions components/Chat/Chat.tsx
@@ -1,21 +1,21 @@
import { Message, OpenAIModel, OpenAIModelNames } from "@/types";
import { Conversation, Message, OpenAIModel } from "@/types";
import { FC, useEffect, useRef } from "react";
import { ChatInput } from "./ChatInput";
import { ChatLoader } from "./ChatLoader";
import { ChatMessage } from "./ChatMessage";
import { ModelSelect } from "./ModelSelect";

interface Props {
model: OpenAIModel;
messages: Message[];
conversation: Conversation;
models: OpenAIModel[];
messageIsStreaming: boolean;
loading: boolean;
lightMode: "light" | "dark";
onSend: (message: Message) => void;
onSelect: (model: OpenAIModel) => void;
onModelChange: (conversation: Conversation, model: OpenAIModel) => void;
}

export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading, lightMode, onSend, onSelect }) => {
export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, loading, lightMode, onSend, onModelChange }) => {
const messagesEndRef = useRef<HTMLDivElement>(null);

const scrollToBottom = () => {
@@ -24,27 +24,28 @@ export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading,

useEffect(() => {
scrollToBottom();
}, [messages]);
}, [conversation.messages]);

return (
<div className="flex-1 overflow-scroll dark:bg-[#343541]">
<div>
{messages.length === 0 ? (
{conversation.messages.length === 0 ? (
<>
<div className="flex justify-center pt-8">
<ModelSelect
model={model}
onSelect={onSelect}
model={conversation.model}
models={models}
onModelChange={(model) => onModelChange(conversation, model)}
/>
</div>

<div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">Chatbot UI</div>
<div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{loading ? "Loading..." : "Chatbot UI"}</div>
</>
) : (
<>
<div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {OpenAIModelNames[model]}</div>
<div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>

{messages.map((message, index) => (
{conversation.messages.map((message, index) => (
<ChatMessage
key={index}
message={message}
21 changes: 12 additions & 9 deletions components/Chat/ModelSelect.tsx
@@ -1,27 +1,30 @@
import { OpenAIModel, OpenAIModelNames } from "@/types";
import { OpenAIModel } from "@/types";
import { FC } from "react";

interface Props {
model: OpenAIModel;
onSelect: (model: OpenAIModel) => void;
models: OpenAIModel[];
onModelChange: (model: OpenAIModel) => void;
}

export const ModelSelect: FC<Props> = ({ model, onSelect }) => {
export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
return (
<div className="flex flex-col">
<label className="text-left mb-2 dark:text-neutral-400 text-neutral-700">Model</label>
<select
className="w-[300px] p-3 dark:text-white dark:bg-[#343541] border border-neutral-500 rounded-lg appearance-none focus:shadow-outline text-neutral-900 cursor-pointer"
placeholder="Select a model"
value={model}
onChange={(e) => onSelect(e.target.value as OpenAIModel)}
value={model.id}
onChange={(e) => {
onModelChange(models.find((model) => model.id === e.target.value) as OpenAIModel);
}}
>
{Object.entries(OpenAIModelNames).map(([value, name]) => (
{models.map((model) => (
<option
key={value}
value={value}
key={model.id}
value={model.id}
>
{name}
{model.name}
</option>
))}
</select>
4 changes: 2 additions & 2 deletions pages/api/chat.ts
@@ -1,5 +1,5 @@
import { Message, OpenAIModel } from "@/types";
import { OpenAIStream } from "@/utils";
import { OpenAIStream } from "@/utils/server";

export const config = {
runtime: "edge"
@@ -23,7 +23,7 @@ const handler = async (req: Request): Promise<Response> => {
break;
}
charCount += message.content.length;
messagesToSend = [message, ...messagesToSend]
messagesToSend = [message, ...messagesToSend];
}

const stream = await OpenAIStream(model, key, messagesToSend);
46 changes: 46 additions & 0 deletions pages/api/models.ts
@@ -0,0 +1,46 @@
import { OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";

export const config = {
runtime: "edge"
};

const handler = async (req: Request): Promise<Response> => {
try {
const { key } = (await req.json()) as {
key: string;
};

const response = await fetch("https://api.openai.com/v1/models", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
}
});

if (response.status !== 200) {
throw new Error("OpenAI API returned an error");
}

const json = await response.json();

const models: OpenAIModel[] = json.data
.map((model: any) => {
for (const [key, value] of Object.entries(OpenAIModelID)) {
if (value === model.id) {
return {
id: model.id,
name: OpenAIModels[value].name
};
}
}
})
.filter(Boolean);

return new Response(JSON.stringify(models), { status: 200 });
} catch (error) {
console.error(error);
return new Response("Error", { status: 500 });
}
};

export default handler;
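
For reference, a minimal sketch of how a client could call the new /api/models endpoint (assumptions: the app is running locally at http://localhost:3000, and an empty key falls back to the server's OPENAI_API_KEY, as in the handler above):

const listModels = async (apiKey: string) => {
  // POST the key; the edge handler responds with the OpenAI models the app recognizes.
  const response = await fetch("http://localhost:3000/api/models", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ key: apiKey })
  });

  if (!response.ok) {
    throw new Error("Failed to fetch models");
  }

  // Expected shape, per the handler: Array<{ id: string; name: string }>
  return (await response.json()) as { id: string; name: string }[];
};

This mirrors the fetchModels helper added to pages/index.tsx further down in this diff.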
79 changes: 65 additions & 14 deletions pages/index.tsx
@@ -1,7 +1,8 @@
import { Chat } from "@/components/Chat/Chat";
import { Navbar } from "@/components/Mobile/Navbar";
import { Sidebar } from "@/components/Sidebar/Sidebar";
import { Conversation, Message, OpenAIModel } from "@/types";
import { Conversation, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
import { cleanConversationHistory, cleanSelectedConversation } from "@/utils/app";
import { IconArrowBarLeft, IconArrowBarRight } from "@tabler/icons-react";
import Head from "next/head";
import { useEffect, useState } from "react";
@@ -10,7 +11,7 @@ export default function Home() {
const [conversations, setConversations] = useState<Conversation[]>([]);
const [selectedConversation, setSelectedConversation] = useState<Conversation>();
const [loading, setLoading] = useState<boolean>(false);
const [model, setModel] = useState<OpenAIModel>(OpenAIModel.GPT_3_5);
const [models, setModels] = useState<OpenAIModel[]>([]);
const [lightMode, setLightMode] = useState<"dark" | "light">("dark");
const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false);
const [showSidebar, setShowSidebar] = useState<boolean>(true);
@@ -33,7 +34,7 @@ export default function Home() {
"Content-Type": "application/json"
},
body: JSON.stringify({
model,
model: updatedConversation.model,
messages: updatedConversation.messages,
key: apiKey
})
@@ -47,6 +48,8 @@ export default function Home() {
const data = response.body;

if (!data) {
setLoading(false);
setMessageIsStreaming(false);
return;
}

@@ -144,13 +147,35 @@
localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
};

const handleChangeModel = (conversation: Conversation, model: OpenAIModel) => {
const updatedConversation = {
...conversation,
model
};

const updatedConversations = conversations.map((c) => {
if (c.id === updatedConversation.id) {
return updatedConversation;
}

return c;
});

setConversations(updatedConversations);
localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));

setSelectedConversation(updatedConversation);
localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
};

const handleNewConversation = () => {
const lastConversation = conversations[conversations.length - 1];

const newConversation: Conversation = {
id: lastConversation ? lastConversation.id + 1 : 1,
name: `Conversation ${lastConversation ? lastConversation.id + 1 : 1}`,
messages: []
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5]
};

const updatedConversations = [...conversations, newConversation];
@@ -160,7 +185,6 @@
setSelectedConversation(newConversation);
localStorage.setItem("selectedConversation", JSON.stringify(newConversation));

setModel(OpenAIModel.GPT_3_5);
setLoading(false);
};

@@ -181,7 +205,8 @@
setSelectedConversation({
id: 1,
name: "New conversation",
messages: []
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5]
});
localStorage.removeItem("selectedConversation");
}
@@ -192,6 +217,27 @@
localStorage.setItem("apiKey", apiKey);
};

const fetchModels = async () => {
setLoading(true);

const response = await fetch("/api/models", {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({
key: apiKey
})
});
const data = await response.json();

if (data) {
setModels(data);
}

setLoading(false);
};

useEffect(() => {
const theme = localStorage.getItem("theme");
if (theme) {
@@ -208,21 +254,27 @@
}

const conversationHistory = localStorage.getItem("conversationHistory");

if (conversationHistory) {
setConversations(JSON.parse(conversationHistory));
const parsedConversationHistory: Conversation[] = JSON.parse(conversationHistory);
const cleanedConversationHistory = cleanConversationHistory(parsedConversationHistory);
setConversations(cleanedConversationHistory);
}

const selectedConversation = localStorage.getItem("selectedConversation");
if (selectedConversation) {
setSelectedConversation(JSON.parse(selectedConversation));
const parsedSelectedConversation: Conversation = JSON.parse(selectedConversation);
const cleanedSelectedConversation = cleanSelectedConversation(parsedSelectedConversation);
setSelectedConversation(cleanedSelectedConversation);
} else {
setSelectedConversation({
id: 1,
name: "New conversation",
messages: []
messages: [],
model: OpenAIModels[OpenAIModelID.GPT_3_5]
});
}

fetchModels();
}, []);

return (
@@ -242,7 +294,6 @@
href="/favicon.ico"
/>
</Head>

{selectedConversation && (
<div className={`flex flex-col h-screen w-screen text-white ${lightMode}`}>
<div className="sm:hidden w-full fixed top-0">
@@ -283,13 +334,13 @@
)}

<Chat
conversation={selectedConversation}
messageIsStreaming={messageIsStreaming}
model={model}
messages={selectedConversation.messages}
models={models}
loading={loading}
lightMode={lightMode}
onSend={handleSend}
onSelect={setModel}
onModelChange={handleChangeModel}
/>
</div>
</div>
32 changes: 25 additions & 7 deletions types/index.ts
@@ -1,13 +1,22 @@
export enum OpenAIModel {
export interface OpenAIModel {
id: string;
name: string;
}

export enum OpenAIModelID {
GPT_3_5 = "gpt-3.5-turbo",
GPT_3_5_LEGACY = "gpt-3.5-turbo-0301"
// GPT_4 = "gpt-4"
GPT_4 = "gpt-4"
}

export const OpenAIModelNames: Record<OpenAIModel, string> = {
[OpenAIModel.GPT_3_5]: "Default (GPT-3.5)",
[OpenAIModel.GPT_3_5_LEGACY]: "Legacy (GPT-3.5)"
// [OpenAIModel.GPT_4]: "GPT-4"
export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
[OpenAIModelID.GPT_3_5]: {
id: OpenAIModelID.GPT_3_5,
name: "Default (GPT-3.5)"
},
[OpenAIModelID.GPT_4]: {
id: OpenAIModelID.GPT_4,
name: "GPT-4"
}
};

export interface Message {
@@ -21,4 +30,13 @@ export interface Conversation {
id: number;
name: string;
messages: Message[];
model: OpenAIModel;
}

// keep track of local storage schema
export interface LocalStorage {
apiKey: string;
conversationHistory: Conversation[];
selectedConversation: Conversation;
theme: "light" | "dark";
}

2 comments on commit 7810a3e


@vercel vercel bot commented on 7810a3e Mar 20, 2023



@hagrace4 hagrace4 commented on 7810a3e Mar 20, 2023


How would one set chatbot-ui to use GPT_4 and not GPT_3_5?

Or is it automatically detected by the API key given?
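
A hedged reading of the diff above (not a maintainer answer): the model is not auto-detected. It is chosen per conversation from the ModelSelect dropdown, new conversations default to GPT-3.5, and the dropdown only lists what /api/models returns for the supplied key, so GPT-4 appears only if the key has access to it. Programmatically, pointing an existing conversation at GPT-4 would look roughly like handleChangeModel in pages/index.tsx; the helper below is hypothetical:

import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";

// Hypothetical helper mirroring handleChangeModel: switch a conversation to GPT-4 and
// persist it under the same localStorage key the app uses (the app also updates React state).
const switchConversationToGPT4 = (conversation: Conversation): Conversation => {
  const updated: Conversation = { ...conversation, model: OpenAIModels[OpenAIModelID.GPT_4] };
  localStorage.setItem("selectedConversation", JSON.stringify(updated));
  return updated;
};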
