
Commit bdb348e

updated workflows
1 parent 54d6a3b commit bdb348e

3 files changed: +294 -1 lines changed

Dockerfile

Lines changed: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
# ---- Base Node ----
FROM node:19-alpine AS base
WORKDIR /app
COPY package*.json ./

# ---- Dependencies ----
FROM base AS dependencies
RUN npm ci

# ---- Build ----
FROM dependencies AS build
COPY . .
RUN npm run build

# ---- Production ----
FROM node:19-alpine AS production
WORKDIR /app
COPY --from=dependencies /app/node_modules ./node_modules
COPY --from=build /app/.next ./.next
COPY --from=build /app/public ./public
COPY --from=build /app/package*.json ./
COPY --from=build /app/next.config.js ./next.config.js
COPY --from=build /app/next-i18next.config.js ./next-i18next.config.js

# Expose the port the app will run on
EXPOSE 3000

# Start the application
CMD ["npm", "start"]
Lines changed: 264 additions & 0 deletions
@@ -0,0 +1,264 @@
import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const';
import {
  cleanData,
  isExportFormatV1,
  isExportFormatV2,
  isExportFormatV3,
  isExportFormatV4,
  isLatestExportFormat,
} from '@/utils/app/importExport';

import { ExportFormatV1, ExportFormatV2, ExportFormatV4 } from '@/types/export';
import { OpenAIModelID, OpenAIModels } from '@/types/openai';

import { describe, expect, it } from 'vitest';

describe('Export Format Functions', () => {
  describe('isExportFormatV1', () => {
    it('should return true for v1 format', () => {
      const obj = [{ id: 1 }];
      expect(isExportFormatV1(obj)).toBe(true);
    });

    it('should return false for non-v1 formats', () => {
      const obj = { version: 3, history: [], folders: [] };
      expect(isExportFormatV1(obj)).toBe(false);
    });
  });

  describe('isExportFormatV2', () => {
    it('should return true for v2 format', () => {
      const obj = { history: [], folders: [] };
      expect(isExportFormatV2(obj)).toBe(true);
    });

    it('should return false for non-v2 formats', () => {
      const obj = { version: 3, history: [], folders: [] };
      expect(isExportFormatV2(obj)).toBe(false);
    });
  });

  describe('isExportFormatV3', () => {
    it('should return true for v3 format', () => {
      const obj = { version: 3, history: [], folders: [] };
      expect(isExportFormatV3(obj)).toBe(true);
    });

    it('should return false for non-v3 formats', () => {
      const obj = { version: 4, history: [], folders: [] };
      expect(isExportFormatV3(obj)).toBe(false);
    });
  });

  describe('isExportFormatV4', () => {
    it('should return true for v4 format', () => {
      const obj = { version: 4, history: [], folders: [], prompts: [] };
      expect(isExportFormatV4(obj)).toBe(true);
    });

    it('should return false for non-v4 formats', () => {
      const obj = { version: 5, history: [], folders: [], prompts: [] };
      expect(isExportFormatV4(obj)).toBe(false);
    });
  });
});

describe('cleanData Functions', () => {
  describe('cleaning v1 data', () => {
    it('should return the latest format', () => {
      const data = [
        {
          id: 1,
          name: 'conversation 1',
          messages: [
            {
              role: 'user',
              content: "what's up ?",
            },
            {
              role: 'assistant',
              content: 'Hi',
            },
          ],
        },
      ] as ExportFormatV1;
      const obj = cleanData(data);
      expect(isLatestExportFormat(obj)).toBe(true);
      expect(obj).toEqual({
        version: 4,
        history: [
          {
            id: 1,
            name: 'conversation 1',
            messages: [
              {
                role: 'user',
                content: "what's up ?",
              },
              {
                role: 'assistant',
                content: 'Hi',
              },
            ],
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            prompt: DEFAULT_SYSTEM_PROMPT,
            temperature: DEFAULT_TEMPERATURE,
            folderId: null,
          },
        ],
        folders: [],
        prompts: [],
      });
    });
  });

  describe('cleaning v2 data', () => {
    it('should return the latest format', () => {
      const data = {
        history: [
          {
            id: '1',
            name: 'conversation 1',
            messages: [
              {
                role: 'user',
                content: "what's up ?",
              },
              {
                role: 'assistant',
                content: 'Hi',
              },
            ],
          },
        ],
        folders: [
          {
            id: 1,
            name: 'folder 1',
          },
        ],
      } as ExportFormatV2;
      const obj = cleanData(data);
      expect(isLatestExportFormat(obj)).toBe(true);
      expect(obj).toEqual({
        version: 4,
        history: [
          {
            id: '1',
            name: 'conversation 1',
            messages: [
              {
                role: 'user',
                content: "what's up ?",
              },
              {
                role: 'assistant',
                content: 'Hi',
              },
            ],
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            prompt: DEFAULT_SYSTEM_PROMPT,
            temperature: DEFAULT_TEMPERATURE,
            folderId: null,
          },
        ],
        folders: [
          {
            id: '1',
            name: 'folder 1',
            type: 'chat',
          },
        ],
        prompts: [],
      });
    });
  });

  describe('cleaning v4 data', () => {
    it('should return the latest format', () => {
      const data = {
        version: 4,
        history: [
          {
            id: '1',
            name: 'conversation 1',
            messages: [
              {
                role: 'user',
                content: "what's up ?",
              },
              {
                role: 'assistant',
                content: 'Hi',
              },
            ],
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            prompt: DEFAULT_SYSTEM_PROMPT,
            temperature: DEFAULT_TEMPERATURE,
            folderId: null,
          },
        ],
        folders: [
          {
            id: '1',
            name: 'folder 1',
            type: 'chat',
          },
        ],
        prompts: [
          {
            id: '1',
            name: 'prompt 1',
            description: '',
            content: '',
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            folderId: null,
          },
        ],
      } as ExportFormatV4;

      const obj = cleanData(data);
      expect(isLatestExportFormat(obj)).toBe(true);
      expect(obj).toEqual({
        version: 4,
        history: [
          {
            id: '1',
            name: 'conversation 1',
            messages: [
              {
                role: 'user',
                content: "what's up ?",
              },
              {
                role: 'assistant',
                content: 'Hi',
              },
            ],
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            prompt: DEFAULT_SYSTEM_PROMPT,
            temperature: DEFAULT_TEMPERATURE,
            folderId: null,
          },
        ],
        folders: [
          {
            id: '1',
            name: 'folder 1',
            type: 'chat',
          },
        ],
        prompts: [
          {
            id: '1',
            name: 'prompt 1',
            description: '',
            content: '',
            model: OpenAIModels[OpenAIModelID.GPT_3_5],
            folderId: null,
          },
        ],
      });
    });
  });
});
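Taken together, these tests pin down the contract of the import helpers: each isExportFormatVn guard recognizes exactly one historical export shape, and cleanData normalizes any of them to the latest version-4 shape, filling in model, prompt, temperature, and folderId defaults for v1/v2 conversations and a 'chat' type on v2 folders. A minimal sketch of how a caller might use them on user-supplied JSON; the importData name and the error handling are illustrative assumptions, not code from this commit:

import { cleanData, isLatestExportFormat } from '@/utils/app/importExport';

// Illustrative only: normalize whatever export version the user provides.
export function importData(rawJson: string) {
  const parsed = JSON.parse(rawJson); // may be v1, v2, v3, or v4 data

  // cleanData upgrades older formats to the latest shape
  // ({ version: 4, history, folders, prompts }), as asserted above.
  const cleaned = cleanData(parsed);

  if (!isLatestExportFormat(cleaned)) {
    throw new Error('Unrecognized export format');
  }

  return cleaned;
}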

license

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
MIT License

-Copyright (c) 2023 SmartGPT
+Copyright (c) 2023 Yannick Metz

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

0 commit comments
