diff --git a/src/api/api.ts b/src/api/api.ts
index a0ed63a9..60751522 100644
--- a/src/api/api.ts
+++ b/src/api/api.ts
@@ -2,7 +2,7 @@ import { modelMaxToken } from '@constants/chat';
 import countTokens from '@utils/messageUtils';
 import { ShareGPTSubmitBodyInterface } from '@type/api';
 import { ConfigInterface, MessageInterface } from '@type/chat';
-import { isAzureEndpoint } from '@utils/api';
+import { isAzureEndpoint, uuidv4 } from '@utils/api';
 
 export const getChatCompletion = async (
   endpoint: string,
@@ -47,6 +47,9 @@ export const getChatCompletion = async (
     restConfig.model = 'gpt-3.5-turbo';
   }
 
+  // TODO: expose in config; note OpenAI's `user` field is meant to be a stable per-user ID, not per-request
+  restConfig.user = uuidv4();
+
   const response = await fetch(endpoint, {
     method: 'POST',
     headers,
@@ -98,6 +101,9 @@ export const getChatCompletionStream = async (
 
   const { max_context, ...restConfig } = config;
 
+  // TODO: expose in config; note OpenAI's `user` field is meant to be a stable per-user ID, not per-request
+  restConfig.user = uuidv4();
+
   const response = await fetch(endpoint, {
     method: 'POST',
     headers,
diff --git a/src/api/helper.ts b/src/api/helper.ts
index 80782d07..5ce879a2 100644
--- a/src/api/helper.ts
+++ b/src/api/helper.ts
@@ -4,14 +4,11 @@ export const parseEventSource = (
   data: string
 ): '[DONE]' | EventSourceData[] => {
   const result = data
-    .split('\n\n')
-    .filter(Boolean)
-    .map((chunk) => {
-      const jsonString = chunk
         .split('\n')
-        .map((line) => line.replace(/^data: /, ''))
-        .join('');
-      if (jsonString === '[DONE]') return jsonString;
+    .filter((line) => line.startsWith('data: ') || line === '[DONE]')
+    .map((line) => {
+      const jsonString = line.replace(/^data: /, '');
+      if (jsonString === '[DONE]') return jsonString;
       try {
         const json = JSON.parse(jsonString);
         return json;
diff --git a/src/components/ConfigMenu/ModelSelect.tsx b/src/components/ConfigMenu/ModelSelect.tsx
index c7caae7e..9298dd0b 100644
--- a/src/components/ConfigMenu/ModelSelect.tsx
+++ b/src/components/ConfigMenu/ModelSelect.tsx
@@ -42,8 +42,11 @@ export const ModelSelect = ({
                 switch (m) {
                   case 'gpt-4':
                   case 'gpt-4-32k':
+                  case 'gpt-4-1106-preview':
                   case 'gpt-3.5-turbo':
                   case 'gpt-3.5-turbo-16k':
+                  case 'claude-2':
+                  case 'claude-instant-1':
                     break;
                 }
                 _setModel(m);
diff --git a/src/constants/chat.ts b/src/constants/chat.ts
index 9b3a4a8c..c451b544 100644
--- a/src/constants/chat.ts
+++ b/src/constants/chat.ts
@@ -20,8 +20,11 @@ Respond using Markdown.`;
 export const modelOptions: ModelChoice[] = [
   'gpt-3.5-turbo',
   'gpt-3.5-turbo-16k',
+  'gpt-4-1106-preview',
   'gpt-4',
   'gpt-4-32k',
+  'claude-2',
+  'claude-instant-1',
   // 'gpt-3.5-turbo-0301',
   // 'gpt-4-0314',
   // 'gpt-4-32k-0314',
@@ -38,9 +41,12 @@ export const modelMaxToken = {
   'gpt-4': 8192,
   'gpt-4-0314': 8192,
   'gpt-4-0613': 8192,
+  'gpt-4-1106-preview': 128000,
   'gpt-4-32k': 32768,
   'gpt-4-32k-0314': 32768,
   'gpt-4-32k-0613': 32768,
+  'claude-2': 100000,
+  'claude-instant-1': 100000,
 };
 
 export const modelCost = {
@@ -76,6 +82,10 @@ export const modelCost = {
     prompt: { price: 0.03, unit: 1000 },
     completion: { price: 0.06, unit: 1000 },
   },
+  'gpt-4-1106-preview': {
+    prompt: { price: 0.01, unit: 1000 },
+    completion: { price: 0.03, unit: 1000 },
+  },
   'gpt-4-32k': {
     prompt: { price: 0.06, unit: 1000 },
     completion: { price: 0.12, unit: 1000 },
@@ -88,6 +98,14 @@ export const modelCost = {
     prompt: { price: 0.06, unit: 1000 },
     completion: { price: 0.12, unit: 1000 },
   },
+  'claude-2': {
+    prompt: { price: 0.01102, unit: 1000 },
+    completion: { price: 0.03268, unit: 1000 },
+  },
+  'claude-instant-1': {
+    prompt: { price: 0.00163, unit: 1000 },
+    completion: { price: 0.00551, unit: 1000 },
+  },
 };
 
 export const defaultUserMaxToken = 4000;
diff --git a/src/types/chat.ts b/src/types/chat.ts
index 59557669..67901dd8 100644
--- a/src/types/chat.ts
+++ b/src/types/chat.ts
@@ -26,6 +26,7 @@ export interface ConfigInterface {
   presence_penalty: number;
   top_p: number;
   frequency_penalty: number;
+  user?: string;
 }
 
 export interface ChatHistoryInterface {
@@ -53,8 +54,11 @@ export interface Folder {
 export type ModelChoice =
   | 'gpt-4'
   | 'gpt-4-32k'
+  | 'gpt-4-1106-preview'
   | 'gpt-3.5-turbo'
-  | 'gpt-3.5-turbo-16k';
+  | 'gpt-3.5-turbo-16k'
+  | 'claude-2'
+  | 'claude-instant-1';
 // | 'gpt-3.5-turbo-0301';
 // | 'gpt-4-0314'
 // | 'gpt-4-32k-0314'
diff --git a/src/utils/api.ts b/src/utils/api.ts
index d66c22a4..fed9c208 100644
--- a/src/utils/api.ts
+++ b/src/utils/api.ts
@@ -1,3 +1,8 @@
 export const isAzureEndpoint = (endpoint: string) => {
   return endpoint.includes('openai.azure.com');
 };
+
+export const uuidv4 = (): string =>
+  'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c =>
+    ((c === 'x' ? Math.random() * 16 : (Math.random() * 16 & 0x3 | 0x8)) | 0).toString(16)
+  );