-import Instructor from '@instructor-ai/instructor';
+import type { z } from 'zod';
+
 import { taskEither as TE } from 'fp-ts';
 import { pipe } from 'fp-ts/lib/function';
-import { OpenAI } from 'openai';
 import { inject, injectable } from 'tsyringe';
-import { z } from 'zod';
 
-import type {
-  SdkCreateMessageInputT,
-  SdkMessageT,
-  SdkSearchAIModelItemT,
-  SdkTableRowWithIdT,
-} from '@llm/sdk';
+import type { SdkSearchAIModelItemT, SdkTableRowWithIdT } from '@llm/sdk';
 
-import { rejectFalsyItems } from '@llm/commons';
+import type { AIProxyInstructedAttrs, AIProxyPromptAttrs, AIProxyStreamPromptAttrs } from './clients/ai-proxy';
 
 import { AIModelsService } from '../ai-models';
-import { OpenAIConnectionCreatorError } from './ai-connector.errors';
-
-const DEFAULT_CLIENT_CONFIG = {
-  temperature: 0.7,
-  top_p: 1,
-  max_tokens: 4096,
-  frequency_penalty: 0,
-  presence_penalty: 0.6,
-};
+import { getAIModelProxyForModel } from './clients';
 
 @injectable()
 export class AIConnectorService {
   constructor(
     @inject(AIModelsService) private readonly aiModelsService: AIModelsService,
   ) {}
 
-  executeEmbeddingPrompt = (
-    {
-      aiModel,
-      input,
-    }: {
-      aiModel: SdkTableRowWithIdT;
-      input: string;
-    },
-  ) => pipe(
-    this.aiModelsService.get(aiModel.id),
-    TE.chainW(({ credentials: { apiKey, apiModel } }) => {
-      const ai = new OpenAI({
-        apiKey,
-      });
-
-      return OpenAIConnectionCreatorError.tryCatch(
-        async () => {
-          const result = await ai.embeddings.create({
-            input,
-            model: apiModel,
-          });
-
-          return result.data[0].embedding;
-        },
-      );
-    }),
-  );
+  executeEmbeddingPrompt = ({ aiModel, input }: { aiModel: SdkTableRowWithIdT; input: string; }) =>
+    pipe(
+      this.aiModelsService.get(aiModel.id),
+      TE.map(getAIModelProxyForModel),
+      TE.chainW(proxy => proxy.executeEmbeddingPrompt(input)),
+    );
 
   executeStreamPrompt = (
-    {
-      aiModel,
-      history,
-      context,
-      message,
-      signal,
-    }: {
-      aiModel: SdkTableRowWithIdT;
-      history: SdkMessageT[];
-      message?: SdkCreateMessageInputT;
-      signal?: AbortSignal;
-      context?: string;
-    },
-  ) => pipe(
-    this.aiModelsService.get(aiModel.id),
-    TE.chainW(({ credentials }) => {
-      const ai = new OpenAI({
-        apiKey: credentials.apiKey,
-      });
-
-      return OpenAIConnectionCreatorError.tryCatch(
-        () => ai.chat.completions.create({
-          ...DEFAULT_CLIENT_CONFIG,
-          stream: true,
-          model: credentials.apiModel,
-          messages: rejectFalsyItems([
-            !!context && {
-              role: 'system',
-              content: context,
-            },
-            ...this.normalizeMessagesToCompletion(history),
-            !!message?.content && {
-              role: 'user',
-              content: message.content,
-            },
-          ]),
-        }, { signal }),
-      );
-    }),
-  );
+    params: { aiModel: SdkTableRowWithIdT; } & AIProxyStreamPromptAttrs) =>
+    pipe(
+      this.aiModelsService.get(params.aiModel.id),
+      TE.map(getAIModelProxyForModel),
+      TE.chainW(proxy => proxy.executeStreamPrompt(params)),
+    );
 
   executePrompt = (
-    {
-      aiModel,
-      history = [],
-      message,
-    }: {
+    { aiModel, ...rest }: {
       aiModel: SdkTableRowWithIdT | SdkSearchAIModelItemT;
-      history?: SdkMessageT[];
-      message: string | OpenAI.Chat.Completions.ChatCompletionMessageParam;
-    },
-  ) => pipe(
-    'credentials' in aiModel
-      ? TE.of(aiModel)
-      : this.aiModelsService.get(aiModel.id),
-    TE.chainW(({ credentials }) => {
-      const ai = new OpenAI({
-        apiKey: credentials.apiKey,
-      });
-
-      const client = Instructor({
-        client: ai,
-        mode: this.determineInstructorMode(credentials.apiModel),
-      });
-
-      return OpenAIConnectionCreatorError.tryCatch(
-        async () => {
-          const result = await client.chat.completions.create({
-            model: credentials.apiModel,
-            messages: [
-              ...this.normalizeMessagesToCompletion(history),
-              typeof message === 'string'
-                ? {
-                    role: 'user',
-                    content: message,
-                  }
-                : message,
-            ],
-          });
-
-          return result.choices[0].message.content;
-        },
-      );
-    }),
-  );
+    } & AIProxyPromptAttrs,
+  ) =>
+    pipe(
+      'credentials' in aiModel
+        ? TE.of(aiModel)
+        : this.aiModelsService.get(aiModel.id),
+      TE.map(getAIModelProxyForModel),
+      TE.chainW(proxy => proxy.executePrompt(rest)),
+    );
 
   executeInstructedPrompt = <Z extends z.AnyZodObject>(
-    {
-      aiModel,
-      history = [],
-      message,
-      schema,
-    }: {
+    { aiModel, ...rest }: {
       aiModel: SdkTableRowWithIdT | SdkSearchAIModelItemT;
-      history?: SdkMessageT[];
-      message: string;
-      schema: Z;
-    },
-  ) => pipe(
-    'credentials' in aiModel
-      ? TE.of(aiModel)
-      : this.aiModelsService.get(aiModel.id),
-    TE.chainW(({ credentials }) => {
-      const ai = new OpenAI({
-        apiKey: credentials.apiKey,
-      });
-
-      const client = Instructor({
-        client: ai,
-        mode: this.determineInstructorMode(credentials.apiModel),
-      });
-
-      return OpenAIConnectionCreatorError.tryCatch(
-        async () => client.chat.completions.create({
-          ...DEFAULT_CLIENT_CONFIG,
-          model: credentials.apiModel,
-          messages: [
-            ...this.normalizeMessagesToCompletion(history),
-            {
-              role: 'user',
-              content: message,
-            },
-          ],
-          stream: false,
-          response_model: {
-            name: 'Extractor',
-            schema,
-          },
-        }),
-      );
-    }),
-  );
-
-  private determineInstructorMode = (model: string): 'JSON' | 'JSON_SCHEMA' | 'FUNCTIONS' | 'TOOLS' => {
-    // GPT-4 models support function calling
-    if (model.includes('gpt-4')) {
-      return 'FUNCTIONS';
-    }
-
-    // GPT-3.5-turbo models after June 13th, 2023 support function calling
-    if (model.includes('gpt-3.5-turbo') && !model.includes('0301')) {
-      return 'FUNCTIONS';
-    }
-
-    // Claude models work best with JSON mode
-    if (model.includes('claude')) {
-      return 'JSON';
-    }
-
-    // Default to JSON mode as it's most widely supported
-    return 'JSON';
-  };
-
-  private normalizeMessagesToCompletion = (messages: SdkMessageT[]) =>
-    messages.map(({ content, role }): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
-      role,
-      content,
-    }));
+    } & AIProxyInstructedAttrs<Z>,
+  ) =>
+    pipe(
+      'credentials' in aiModel
+        ? TE.of(aiModel)
+        : this.aiModelsService.get(aiModel.id),
+      TE.map(getAIModelProxyForModel),
+      TE.chainW(proxy => proxy.executeInstructedPrompt(rest)),
+    );
 }
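
For reference, a minimal usage sketch of the refactored service (not part of this commit). It assumes `AIProxyPromptAttrs` still carries the `history` and `message` fields the old inline signature had, and that the proxy methods return the same fp-ts `TaskEither` the originals did; the container resolution and field values below are illustrative only:

import { container } from 'tsyringe';

// Resolve the service through tsyringe, matching the @injectable() wiring above.
const connector = container.resolve(AIConnectorService);

// `executePrompt` forwards everything except `aiModel` to the resolved proxy,
// so call sites only change where the attrs types did.
const result = await connector.executePrompt({
  aiModel: { id: 1 },                     // SdkTableRowWithIdT
  history: [],                            // assumed field of AIProxyPromptAttrs
  message: 'Summarize this document.',    // assumed field of AIProxyPromptAttrs
})(); // a TaskEither is a thunk: invoking it yields a Promise of an Either
      // (left = connection/model error, right = the completion)

Note that passing a bare `{ id }` row takes the `aiModelsService.get` lookup branch, while a full `SdkSearchAIModelItemT` with `credentials` short-circuits it via `TE.of`.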