1
- import sys
2
1
import os
3
-
4
- # Add the project root to the Python path
5
- project_root = os .path .abspath (os .path .join (os .path .dirname (__file__ ), '..' , '..' ))
6
- sys .path .insert (0 , project_root )
7
-
8
2
import re
9
3
import gradio as gr
10
4
from glob import glob
11
- from app .webui .process import model_load , diff_texts , translator , translator_sec
12
- from llama_index .core import SimpleDirectoryReader
5
+ from process import model_load , diff_texts , translator , translator_sec , extract_docx , extract_pdf , extract_text
13
6
14
7
def huanik (
15
8
endpoint : str ,
@@ -24,16 +17,15 @@ def huanik(
24
17
source_text : str ,
25
18
country : str ,
26
19
max_tokens : int ,
27
- context_window : int ,
28
- num_output : int ,
20
+ temperature : int ,
29
21
rpm : int ,
30
22
):
31
23
32
24
if not source_text or source_lang == target_lang :
33
25
raise gr .Error ("Please check that the content or options are entered correctly." )
34
26
35
27
try :
36
- model_load (endpoint , model , api_key , context_window , num_output , rpm )
28
+ model_load (endpoint , model , api_key , temperature , rpm )
37
29
except Exception as e :
38
30
raise gr .Error (f"An unexpected error occurred: { e } " )
39
31
@@ -44,8 +36,6 @@ def huanik(
44
36
endpoint2 = endpoint2 ,
45
37
model2 = model2 ,
46
38
api_key2 = api_key2 ,
47
- context_window = context_window ,
48
- num_output = num_output ,
49
39
source_lang = source_lang ,
50
40
target_lang = target_lang ,
51
41
source_text = source_text ,
@@ -76,20 +66,24 @@ def update_model(endpoint):
76
66
endpoint_model_map = {
77
67
"Groq" : "llama3-70b-8192" ,
78
68
"OpenAI" : "gpt-4o" ,
79
- "Cohere" : "command-r" ,
80
69
"TogetherAI" : "Qwen/Qwen2-72B-Instruct" ,
81
70
"Ollama" : "llama3" ,
82
- "Huggingface" : "mistralai/Mistral-7B-Instruct-v0.3"
83
71
}
84
72
return gr .update (value = endpoint_model_map [endpoint ])
85
73
86
- def read_doc (file ):
87
- docs = SimpleDirectoryReader (input_files = [file ]).load_data ()
88
- texts = ""
89
- for doc in docs :
90
- texts += doc .text
91
- texts = re .sub (r'(?m)^\s*$\n?' , '' , texts )
92
- return texts
74
def read_doc(path):
    """Read the document at *path* and return its text with blank lines removed.

    Dispatches on the file extension: PDF and DOCX files go through their
    dedicated extractors; every other supported extension is read as plain
    text via ``extract_text``.

    Args:
        path: Filesystem path to the uploaded file.

    Returns:
        The extracted text with empty/whitespace-only lines stripped.

    Raises:
        gr.Error: If the file extension is not one of the supported types.
    """
    # The extension is everything after the last dot; lowercase it so
    # "REPORT.PDF" is treated the same as "report.pdf".
    file_type = path.split(".")[-1].lower()
    # Set membership is the idiomatic O(1) check for a closed list of types.
    if file_type not in {"pdf", "txt", "py", "docx", "json", "cpp", "md"}:
        raise gr.Error("Oops, unsupported files.")
    # file_type is already the bare extension, so compare with ==,
    # not endswith() (the original endswith() worked only by accident).
    if file_type == "pdf":
        content = extract_pdf(path)
    elif file_type == "docx":
        content = extract_docx(path)
    else:
        content = extract_text(path)
    # Collapse blank lines: (?m) makes ^/$ match per line, so every
    # whitespace-only line (and its newline) is deleted.
    return re.sub(r'(?m)^\s*$\n?', '', content)
93
87
94
88
def enable_sec (choice ):
95
89
if choice :
@@ -195,7 +189,7 @@ def closeBtnHide(output_final):
195
189
with gr .Column (scale = 1 ) as menubar :
196
190
endpoint = gr .Dropdown (
197
191
label = "Endpoint" ,
198
- choices = ["Groq" ,"OpenAI" ,"Cohere" , " TogetherAI" ,"Ollama" , "Huggingface " ],
192
+ choices = ["Groq" ,"OpenAI" ,"TogetherAI" ,"Ollama" ],
199
193
value = "OpenAI" ,
200
194
)
201
195
choice = gr .Checkbox (label = "Additional Endpoint" , info = "Additional endpoint for reflection" )
@@ -204,7 +198,7 @@ def closeBtnHide(output_final):
204
198
with gr .Column (visible = False ) as AddEndpoint :
205
199
endpoint2 = gr .Dropdown (
206
200
label = "Additional Endpoint" ,
207
- choices = ["Groq" ,"OpenAI" ,"Cohere" , " TogetherAI" ,"Ollama" , "Huggingface " ],
201
+ choices = ["Groq" ,"OpenAI" ,"TogetherAI" ,"Ollama" ],
208
202
value = "OpenAI" ,
209
203
)
210
204
model2 = gr .Textbox (label = "Model" , value = "gpt-4o" , )
@@ -230,19 +224,12 @@ def closeBtnHide(output_final):
230
224
value = 1000 ,
231
225
step = 8 ,
232
226
)
233
- context_window = gr .Slider (
234
- label = "Context Window" ,
235
- minimum = 512 ,
236
- maximum = 8192 ,
237
- value = 4096 ,
238
- step = 8 ,
239
- )
240
- num_output = gr .Slider (
241
- label = "Output Num" ,
242
- minimum = 256 ,
243
- maximum = 8192 ,
244
- value = 512 ,
245
- step = 8 ,
227
+ temperature = gr .Slider (
228
+ label = "Temperature" ,
229
+ minimum = 0 ,
230
+ maximum = 1.0 ,
231
+ value = 0.3 ,
232
+ step = 0.1 ,
246
233
)
247
234
rpm = gr .Slider (
248
235
label = "Request Per Minute" ,
@@ -251,6 +238,10 @@ def closeBtnHide(output_final):
251
238
value = 60 ,
252
239
step = 1 ,
253
240
)
241
+ # json_mode = gr.Checkbox(
242
+ # False,
243
+ # label="Json Mode",
244
+ # )
254
245
with gr .Column (scale = 4 ):
255
246
source_text = gr .Textbox (
256
247
label = "Source Text" ,
@@ -275,14 +266,14 @@ def closeBtnHide(output_final):
275
266
close = gr .Button (value = "Stop" , visible = False )
276
267
277
268
switchBtn .click (fn = switch , inputs = [source_lang ,source_text ,target_lang ,output_final ], outputs = [source_lang ,source_text ,target_lang ,output_final ])
278
-
269
+
279
270
menuBtn .click (fn = update_menu , inputs = visible , outputs = [visible , menubar ], js = JS )
280
271
endpoint .change (fn = update_model , inputs = [endpoint ], outputs = [model ])
281
-
272
+
282
273
choice .select (fn = enable_sec , inputs = [choice ], outputs = [AddEndpoint ])
283
274
endpoint2 .change (fn = update_model , inputs = [endpoint2 ], outputs = [model2 ])
284
-
285
- start_ta = submit .click (fn = huanik , inputs = [endpoint , model , api_key , choice , endpoint2 , model2 , api_key2 , source_lang , target_lang , source_text , country , max_tokens , context_window , num_output , rpm ], outputs = [output_init , output_reflect , output_final , output_diff ])
275
+
276
+ start_ta = submit .click (fn = huanik , inputs = [endpoint , model , api_key , choice , endpoint2 , model2 , api_key2 , source_lang , target_lang , source_text , country , max_tokens , temperature , rpm ], outputs = [output_init , output_reflect , output_final , output_diff ])
286
277
upload .upload (fn = read_doc , inputs = upload , outputs = source_text )
287
278
output_final .change (fn = export_txt , inputs = output_final , outputs = [export ])
288
279
0 commit comments