-import sys
 import os
-
-# Add the project root to the Python path
-project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
-sys.path.insert(0, project_root)
-
 import re
 import gradio as gr
 from glob import glob
-from app.webui.process import model_load, diff_texts, translator, translator_sec
-from llama_index.core import SimpleDirectoryReader
+from process import model_load, diff_texts, translator, translator_sec, extract_docx, extract_pdf, extract_text
 
 def huanik(
     endpoint: str,
@@ -24,16 +17,15 @@ def huanik(
     source_text: str,
     country: str,
     max_tokens: int,
-    context_window: int,
-    num_output: int,
+    temperature: int,
     rpm: int,
 ):
 
     if not source_text or source_lang == target_lang:
         raise gr.Error("Please check that the content or options are entered correctly.")
 
     try:
-        model_load(endpoint, model, api_key, context_window, num_output, rpm)
+        model_load(endpoint, model, api_key, temperature, rpm)
     except Exception as e:
         raise gr.Error(f"An unexpected error occurred: {e}")
 
@@ -44,8 +36,6 @@ def huanik(
         endpoint2=endpoint2,
         model2=model2,
         api_key2=api_key2,
-        context_window=context_window,
-        num_output=num_output,
         source_lang=source_lang,
         target_lang=target_lang,
         source_text=source_text,
@@ -76,20 +66,24 @@ def update_model(endpoint):
     endpoint_model_map = {
         "Groq": "llama3-70b-8192",
         "OpenAI": "gpt-4o",
-        "Cohere": "command-r",
         "TogetherAI": "Qwen/Qwen2-72B-Instruct",
         "Ollama": "llama3",
-        "Huggingface": "mistralai/Mistral-7B-Instruct-v0.3"
     }
     return gr.update(value=endpoint_model_map[endpoint])
 
-def read_doc(file):
-    docs = SimpleDirectoryReader(input_files=[file]).load_data()
-    texts = ""
-    for doc in docs:
-        texts += doc.text
-    texts = re.sub(r'(?m)^\s*$\n?', '', texts)
-    return texts
+def read_doc(path):
+    file_type = path.split(".")[-1]
+    print(file_type)
+    if file_type in ["pdf", "txt", "py", "docx", "json", "cpp", "md"]:
+        if file_type.endswith("pdf"):
+            content = extract_pdf(path)
+        elif file_type.endswith("docx"):
+            content = extract_docx(path)
+        else:
+            content = extract_text(path)
+        return re.sub(r'(?m)^\s*$\n?', '', content)
+    else:
+        raise gr.Error("Oops, unsupported files.")
 
 def enable_sec(choice):
     if choice:
@@ -195,7 +189,7 @@ def closeBtnHide(output_final):
         with gr.Column(scale=1) as menubar:
             endpoint = gr.Dropdown(
                 label="Endpoint",
-                choices=["Groq","OpenAI","Cohere","TogetherAI","Ollama","Huggingface"],
+                choices=["Groq","OpenAI","TogetherAI","Ollama"],
                 value="OpenAI",
             )
             choice = gr.Checkbox(label="Additional Endpoint", info="Additional endpoint for reflection")
@@ -204,7 +198,7 @@ def closeBtnHide(output_final):
             with gr.Column(visible=False) as AddEndpoint:
                 endpoint2 = gr.Dropdown(
                     label="Additional Endpoint",
-                    choices=["Groq","OpenAI","Cohere","TogetherAI","Ollama","Huggingface"],
+                    choices=["Groq","OpenAI","TogetherAI","Ollama"],
                     value="OpenAI",
                 )
                 model2 = gr.Textbox(label="Model", value="gpt-4o", )
@@ -230,19 +224,12 @@ def closeBtnHide(output_final):
                 value=1000,
                 step=8,
             )
-            context_window = gr.Slider(
-                label="Context Window",
-                minimum=512,
-                maximum=8192,
-                value=4096,
-                step=8,
-            )
-            num_output = gr.Slider(
-                label="Output Num",
-                minimum=256,
-                maximum=8192,
-                value=512,
-                step=8,
+            temperature = gr.Slider(
+                label="Temperature",
+                minimum=0,
+                maximum=1.0,
+                value=0.3,
+                step=0.1,
             )
             rpm = gr.Slider(
                 label="Request Per Minute",
@@ -251,6 +238,10 @@ def closeBtnHide(output_final):
                 value=60,
                 step=1,
             )
+            # json_mode = gr.Checkbox(
+            #     False,
+            #     label="Json Mode",
+            # )
         with gr.Column(scale=4):
             source_text = gr.Textbox(
                 label="Source Text",
@@ -275,14 +266,14 @@ def closeBtnHide(output_final):
     close = gr.Button(value="Stop", visible=False)
 
     switchBtn.click(fn=switch, inputs=[source_lang,source_text,target_lang,output_final], outputs=[source_lang,source_text,target_lang,output_final])
-
+
     menuBtn.click(fn=update_menu, inputs=visible, outputs=[visible, menubar], js=JS)
     endpoint.change(fn=update_model, inputs=[endpoint], outputs=[model])
-
+
     choice.select(fn=enable_sec, inputs=[choice], outputs=[AddEndpoint])
     endpoint2.change(fn=update_model, inputs=[endpoint2], outputs=[model2])
-
-    start_ta = submit.click(fn=huanik, inputs=[endpoint, model, api_key, choice, endpoint2, model2, api_key2, source_lang, target_lang, source_text, country, max_tokens, context_window, num_output, rpm], outputs=[output_init, output_reflect, output_final, output_diff])
+
+    start_ta = submit.click(fn=huanik, inputs=[endpoint, model, api_key, choice, endpoint2, model2, api_key2, source_lang, target_lang, source_text, country, max_tokens, temperature, rpm], outputs=[output_init, output_reflect, output_final, output_diff])
     upload.upload(fn=read_doc, inputs=upload, outputs=source_text)
     output_final.change(fn=export_txt, inputs=output_final, outputs=[export])
 