diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml
index 6b2ab824..925cf137 100644
--- a/.github/workflows/develop.yml
+++ b/.github/workflows/develop.yml
@@ -82,8 +82,8 @@ jobs:
         run: pip install -r api/requirements.txt pyinstaller==6.6.0

       - name: Build flask exe
-        run: pyinstaller --name app --onefile --console api/app.py --hidden-import=tiktoken_ext.openai_public --hidden-import=tiktoken_ext
-
+        run: pyinstaller api/app.spec
+
       - name: Install dependencies
         run: npm install
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1a7a4092..dbf49572 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -82,7 +82,7 @@ jobs:
         run: pip install -r api/requirements.txt pyinstaller==6.6.0

       - name: Build flask exe
-        run: pyinstaller --name app --onefile --console api/app.py --hidden-import=tiktoken_ext.openai_public --hidden-import=tiktoken_ext
+        run: pyinstaller app.spec

       - name: Install dependencies
         run: npm install
diff --git a/README.md b/README.md
index 0e7df2fc..74026e3f 100644
--- a/README.md
+++ b/README.md
@@ -1,23 +1,35 @@
 # LinguifAI
+
 Projeto NLP 2 - 2023.2

 ## Tecnologias utilizadas:
+
 O projeto utiliza das seguintes tecnologias e ferramentas:

 ### Front-End
-* React
-* React-Router
-* Axios
-* ElectronJs
-* Tailwind
+
+- React
+- React-Router
+- Axios
+- ElectronJs
+- Tailwind

 ### Back-End
-* Python
-* Flask
+
+- Python
+- Flask

 ## Como rodar a aplicação:
-Primeiramente, instale as depencencias:
+
+Primeiramente, instale as dependências:
+
+Dependências do back-end:
+
+```bash
+pip install -r api/requirements.txt
+```
+
+Dependências do front-end:

 ```bash
 yarn
@@ -48,6 +60,6 @@ npm run electron
 - [x] Abrir CSV
 - [x] Exibir Preview de N linhas do CSV
 - [x] Escolher colunas de entrada
-- [X] Selecionar classificador e enviar dados
+- [x] Selecionar classificador e enviar dados
 - [x] Exibir resultado do classificador
-- [x] Exportar resultado em csv?
\ No newline at end of file
+- [x] Exportar resultado em csv?
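Note on the build change above: both workflows now defer to the spec file, whose `hiddenimports` (updated later in this diff) list `tiktoken_ext.openai_public` and `tiktoken_ext`. tiktoken discovers its encodings through the `tiktoken_ext` namespace plugin, which PyInstaller's static analysis does not see, so a frozen build without those entries fails at runtime. A minimal smoke test of a built executable might look like the sketch below (a hypothetical check, not part of the repo):

```python
# Hypothetical smoke test for the frozen Flask app: if the tiktoken_ext plugins
# were not bundled as hidden imports, get_encoding() raises at runtime.
import tiktoken

enc = tiktoken.get_encoding("cl100k_base")  # resolved via tiktoken_ext.openai_public
print(enc.encode("LinguifAI"))              # prints a list of token ids
```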
diff --git a/api/Neural_Network2.py b/api/Neural_Network2.py
index 51b58dd2..32a65182 100644
--- a/api/Neural_Network2.py
+++ b/api/Neural_Network2.py
@@ -381,7 +381,7 @@ def create_and_train_rnn_model(df, name, epochs = 10, batch_size = 32, learning_
         if len(valid_losses) > 2 and all(valid_loss >= loss for loss in valid_losses[-3:]):
             print('Stopping early due to lack of improvement in validation loss.')
-            break
+            # break

         train_losses.append(train_loss)
         valid_losses.append(valid_loss)
diff --git a/api/app.py b/api/app.py
index c315844e..c1ceb7a3 100644
--- a/api/app.py
+++ b/api/app.py
@@ -58,7 +58,10 @@ def chat():
     chat_history = data.get('history', [])
     api_key = data.get('apikey')

+
     if df is not None:
+
+        print(df.head(1))
         documents = split_dataframe_into_documents(df)

         embeddings = OpenAIEmbeddings(api_key=api_key)
@@ -74,27 +77,24 @@
         return jsonify(reply=bot_reply)
     else:
-        print("No df")
-        return jsonify(reply="No data available."), 400
-
-        # messages = [{"role": "system", "content": "You are a helpful assistant."}]
-        # for msg in chat_history:
-        #     messages.append({"role": "user" if msg['origin'] == 'user' else "assistant", "content": msg['text']})
-        # messages.append({"role": "user", "content": user_message})
-
-        # try:
-        #     client = openai.OpenAI(api_key = api_key)
-        #     response = client.chat.completions.create(
-        #         model="gpt-3.5-turbo",
-        #         messages=messages,
-        #         max_tokens=200
-        #     )
-        #     bot_reply = response.choices[0].message.content.strip()
-
-        #     return jsonify(reply=bot_reply)
-        # except Exception as e:
-        #     print(f"Error: {e}")
-        #     return jsonify(reply="Desculpe, ocorreu um erro ao processar sua mensagem."), 500
+        messages = [{"role": "system", "content": "You are a helpful assistant."}]
+        for msg in chat_history:
+            messages.append({"role": "user" if msg['origin'] == 'user' else "assistant", "content": msg['text']})
+        messages.append({"role": "user", "content": user_message})
+
+        try:
+            client = openai.OpenAI(api_key = api_key)
+            response = client.chat.completions.create(
+                model="gpt-3.5-turbo",
+                messages=messages,
+                max_tokens=200
+            )
+            bot_reply = response.choices[0].message.content.strip()
+
+            return jsonify(reply=bot_reply)
+        except Exception as e:
+            print(f"Error: {e}")
+            return jsonify(reply="Desculpe, ocorreu um erro ao processar sua mensagem."), 500


 def shutdown_server():
@@ -116,7 +116,7 @@ def receive_file():
     if file.filename == '':
         return jsonify({'error': 'No selected file'}), 400
     if file:
-        df = pd.read_csv(file)
+        df = pd.read_csv(file, on_bad_lines='skip')

         return jsonify({'message': 'File uploaded successfully'}), 200
@@ -282,40 +282,6 @@ def apikey():
         return jsonify(reply="Desculpe, ocorreu um erro ao processar sua mensagem."), 500


-# @app.route('/chat', methods=['POST'])
-# def chat():
-#     global df
-#     if df is not None:
-#         # run rag
-#         print(df.head(1))
-#     else:
-#         print("No df")
-#     data = request.get_json()
-#     user_message = data.get('message')
-#     chat_history = data.get('history', [])
-#     api_key = data.get('apikey')
-
-#     messages = [{"role": "system", "content": "You are a helpful assistant."}]
-#     for msg in chat_history:
-#         messages.append({"role": "user" if msg['origin'] == 'user' else "assistant", "content": msg['text']})
-#     messages.append({"role": "user", "content": user_message})
-
-#     try:
-#         client = openai.OpenAI(api_key = api_key)
-#         response = client.chat.completions.create(
-#             model="gpt-3.5-turbo",  # ou a gente poderia ver com gpt 4 mas por enquanto coloquei 3.5
-#             messages=messages,
-#             max_tokens=200
-#         )
-#         bot_reply = response.choices[0].message.content.strip()
-
-#         return jsonify(reply=bot_reply)
-#     except Exception as e:
-#         print(f"Error: {e}")
-#         return jsonify(reply="Desculpe, ocorreu um erro ao processar sua mensagem."), 500
-
-

 if __name__ == '__main__':
     training_progress = {
         'training_progress': 0,
diff --git a/api/app.spec b/api/app.spec
index d5abf94e..46b741c0 100644
--- a/api/app.spec
+++ b/api/app.spec
@@ -1,3 +1,4 @@
+
 # -*- mode: python ; coding: utf-8 -*-

 entry_point = 'app.py'
@@ -9,7 +10,7 @@ a = Analysis(
     pathex=[],
     binaries=[],
     datas=[],
-    hiddenimports=[],
+    hiddenimports=['tiktoken_ext.openai_public', 'tiktoken_ext'],
     hookspath=[],
     hooksconfig={},
     runtime_hooks=[],
@@ -48,4 +49,4 @@ coll = COLLECT(
     upx=True,
     upx_exclude=[],
     name='app',
-)
+)
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 886813fb..a8bdf0c2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "LinguifAI",
-  "version": "0.2.5",
+  "version": "0.2.8",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "LinguifAI",
-      "version": "0.2.5",
+      "version": "0.2.8",
       "dependencies": {
         "@emotion/react": "^11.11.1",
         "@emotion/styled": "^11.11.0",
@@ -30,6 +30,8 @@
         "react-papaparse": "^4.1.0",
         "react-router-dom": "^6.15.0",
         "react-scripts": "5.0.0",
+        "react-spinners": "^0.13.8",
+        "react-toastify": "^10.0.5",
         "react-transition-group": "^4.4.5",
         "serve": "^14.2.1",
         "typescript": "^4.9.5"
@@ -7326,9 +7328,9 @@
       }
     },
     "node_modules/clsx": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz",
-      "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==",
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
+      "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==",
       "engines": {
         "node": ">=6"
       }
@@ -17862,6 +17864,27 @@
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
       "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
     },
+    "node_modules/react-spinners": {
+      "version": "0.13.8",
+      "resolved": "https://registry.npmjs.org/react-spinners/-/react-spinners-0.13.8.tgz",
+      "integrity": "sha512-3e+k56lUkPj0vb5NDXPVFAOkPC//XyhKPJjvcGjyMNPWsBKpplfeyialP74G7H7+It7KzhtET+MvGqbKgAqpZA==",
+      "peerDependencies": {
+        "react": "^16.0.0 || ^17.0.0 || ^18.0.0",
+        "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0"
+      }
+    },
+    "node_modules/react-toastify": {
+      "version": "10.0.5",
+      "resolved": "https://registry.npmjs.org/react-toastify/-/react-toastify-10.0.5.tgz",
+      "integrity": "sha512-mNKt2jBXJg4O7pSdbNUfDdTsK9FIdikfsIE/yUCxbAEXl4HMyJaivrVFcn3Elvt5xvCQYhUZm+hqTIu1UXM3Pw==",
+      "dependencies": {
+        "clsx": "^2.1.0"
+      },
+      "peerDependencies": {
+        "react": ">=18",
+        "react-dom": ">=18"
+      }
+    },
     "node_modules/react-transition-group": {
       "version": "4.4.5",
       "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz",
diff --git a/package.json b/package.json
index 89037c7f..c8ec883c 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,7 @@
     "name": "Cameron",
     "email": "cameron.maloney@warriorlife.net"
   },
-  "version": "0.2.5",
+  "version": "0.2.8",
   "main": "./public/electron.js",
   "homepage": "./",
   "private": true,
@@ -32,6 +32,8 @@
     "react-papaparse": "^4.1.0",
     "react-router-dom": "^6.15.0",
     "react-scripts": "5.0.0",
+    "react-spinners": "^0.13.8",
+    "react-toastify": "^10.0.5",
    "react-transition-group": "^4.4.5",
     "serve": "^14.2.1",
     "typescript": "^4.9.5"
diff --git a/src/components/chatbot/chatbot.tsx b/src/components/chatbot/chatbot.tsx
index b7113369..52ac255f 100644
--- a/src/components/chatbot/chatbot.tsx
+++ b/src/components/chatbot/chatbot.tsx
@@ -50,21 +50,22 @@ const ChatBot: React.FC = () => {
       const errorMessage: Message = { text: "Desculpe, ocorreu um erro. Tente novamente.", origin: 'bot' };
       setChatHistory(prevHistory => [...prevHistory, errorMessage]);
     }
-
+    console.log(message)
     setMessage("");
+    console.log(message)
   };

   const sendAPIKeyMessage = () => {
     setChatHistory(prevHistory => [
       ...prevHistory,
-      { text: "Olá! Eu sou o (LinguiTalk ou LinguaBot). Coloca a sua chave:", origin: 'bot' }
+      { text: "Olá! Eu sou o LinguiTalk, um chatbot para lhe auxiliar na exploração dos seus dados! Primeiro, insira uma chave API válida do ChatGPT:", origin: 'bot' }
     ]);
   };

   const sendInitialMessage = () => {
     setChatHistory(prevHistory => [
       ...prevHistory,
-      { text: "Olá! Eu sou o (LinguiTalk ou LinguaBot). Como posso ajudar?", origin: 'bot' }
+      { text: "Olá! Eu sou o LinguiTalk. Como posso ajudar?", origin: 'bot' }
     ]);
   };

@@ -98,7 +99,7 @@ const ChatBot: React.FC = () => {
       {isOpen && (
-

LinguiTalk ou LinguaBot

+

LinguiTalk

+    );
+
+    if (currentPage > 3) {
+      pageNumbers.push(...);
+    }
+
+    const startPage = Math.max(1, currentPage - 1);
+    const endPage = Math.min(totalPages - 2, currentPage + 1);
+
+    for (let i = startPage; i <= endPage; i++) {
+      pageNumbers.push(
+      );
+    }
+
+    if (currentPage < totalPages - 4) {
+      pageNumbers.push(...);
+    }
+
+    if (totalPages > 1) {
+      pageNumbers.push(
+      );
+    }
+
+    return pageNumbers;
+  };
+
+  return (
+
+ +
+ +
+ {renderPageNumbers()} +
+ +
+
+  );
+}
\ No newline at end of file
diff --git a/src/components/selectFileCard/selectFileCard.tsx b/src/components/selectFileCard/selectFileCard.tsx
index e02b3e6e..131fd836 100644
--- a/src/components/selectFileCard/selectFileCard.tsx
+++ b/src/components/selectFileCard/selectFileCard.tsx
@@ -2,9 +2,9 @@ import { Icon } from "@iconify/react";
 import { ChangeEvent, useState } from "react";
 import Papa from "papaparse";
 import CsvTable from "../csvTable/csvTable";
-import { Link } from "@mui/material";
+import { Button } from "@mui/material";

-interface Props {
+interface props {
   selectedFile: File | null;
   setSelectedFile: (file: File | null) => void;
   setData: (data: any[][]) => void;
@@ -13,6 +13,8 @@
   setHeader: (header: string[]) => void;
 }

+const ITEMS_PER_PAGE = 6;
+
 export default function SelectFileCard({
   selectedFile,
   setSelectedFile,
@@ -20,38 +22,40 @@ export default function SelectFileCard({
   data,
   header,
   setHeader,
-}: Props) {
+}: props) {
   const [isDragging, setIsDragging] = useState(false);
+  const [currentPage, setCurrentPage] = useState(0);

-  // Handle file selection from file input
   const handleFileChange = async (event: ChangeEvent) => {
     const file = event.target.files?.[0];
     if (file && file.name.endsWith(".csv")) {
       setSelectedFile(file);
-      parseCSV(file);
+
+      Papa.parse(file, {
+        header: true,
+        dynamicTyping: true,
+        skipEmptyLines: true,
+        complete(results) {
+          let chaves = Object.keys(results.data[0] || []);
+
+          let data: any[][] = results.data.map((row: any) => {
+            let newRow: any[] = [];
+            chaves.forEach((chave) => {
+              newRow.push(row[chave]);
+            });
+            return newRow;
+          });
+
+          setData(data);
+          setHeader(chaves);
+          setCurrentPage(0); // Reset to first page
+        },
+      });
     } else {
       setSelectedFile(null);
     }
   };

-  // Parse CSV file
-  const parseCSV = (file: File) => {
-    Papa.parse(file, {
-      header: true,
-      dynamicTyping: true,
-      skipEmptyLines: true,
-      complete(results) {
-        const keys = Object.keys(results.data[0] || []);
-        const data: any[][] = results.data.map((row: any) => {
-          return keys.map((key) => row[key]);
-        });
-        setData(data);
-        setHeader(keys);
-      },
-    });
-  };
-
-  // Handle file drop
   const handleDrop = (event: React.DragEvent) => {
     setIsDragging(false);
     event.preventDefault();
@@ -59,7 +63,6 @@
     const file = event.dataTransfer.files[0];
     if (file && file.name.endsWith(".csv")) {
       setSelectedFile(file);
-      parseCSV(file);
     } else {
       setSelectedFile(null);
     }
@@ -74,6 +77,70 @@
     setIsDragging(false);
   };

+  const handlePageChange = (page: number) => {
+    setCurrentPage(page);
+  };
+
+  const displayedData = data.slice(
+    currentPage * ITEMS_PER_PAGE,
+    (currentPage + 1) * ITEMS_PER_PAGE
+  );
+
+  const totalPages = Math.ceil(data.length / ITEMS_PER_PAGE);
+
+  const renderPageNumbers = (): JSX.Element[] => {
+    const pageNumbers: JSX.Element[] = [];
+
+    if (totalPages <= 1) return pageNumbers;
+
+    pageNumbers.push(
+    );
+
+    if (currentPage > 3) {
+      pageNumbers.push(...);
+    }
+
+    const startPage = Math.max(1, currentPage - 1);
+    const endPage = Math.min(totalPages - 2, currentPage + 1);
+
+    for (let i = startPage; i <= endPage; i++) {
+      pageNumbers.push(
+      );
+    }
+
+    if (currentPage < totalPages - 4) {
+      pageNumbers.push(...);
+    }
+
+    if (totalPages > 1) {
+      pageNumbers.push(
+      );
+    }
+
+    return pageNumbers;
+  };
+
   return !selectedFile ? (
0 ? `w-4/5` : `w-2/5` @@ -121,7 +188,30 @@ export default function SelectFileCard({ onDragOver={handleDragOver} onDragLeave={handleDragLeave} > - {data.length > 0 && } + {data.length > 0 && ( + <> + +
+ +
+ {renderPageNumbers()} +
+ +
+ + )}
   );
-}
+}
\ No newline at end of file
diff --git a/src/pages/layout/header.tsx b/src/pages/layout/header.tsx
index d1a90c7a..38d02887 100644
--- a/src/pages/layout/header.tsx
+++ b/src/pages/layout/header.tsx
@@ -8,14 +8,14 @@ interface HeaderProps {
 const Header: React.FC = ({ title }) => {
   return (
-
+
-
- Logo da LinguifAI -

+
+ Logo da LinguifAI +

LinguifAI

-

+
   );
 };

diff --git a/src/pages/views/homeView.tsx b/src/pages/views/homeView.tsx
index ad60840d..b36d2b0f 100644
--- a/src/pages/views/homeView.tsx
+++ b/src/pages/views/homeView.tsx
@@ -2,6 +2,7 @@ import React, { useState, useEffect } from "react";
 import axios from "axios";
 import SelectFileCard from "../../components/selectFileCard/selectFileCard";
 import ResultTable from "../../components/resultTable/resultTable";
+import { ClipLoader } from "react-spinners";

 export default function HomeView() {
   const [selectedFile, setSelectedFile] = useState(null);
@@ -108,7 +109,11 @@ export default function HomeView() {
   };

   if (!isBackendAvailable) {
-    return
Carregando backend...
; + return ( +
+ +
+ ); } return ( @@ -161,12 +166,12 @@ export default function HomeView() { ))}
-
+
{Object.keys(result).length > 0 && ( @@ -179,7 +184,7 @@ export default function HomeView() { />
)} +
   );
 }
diff --git a/yarn.lock b/yarn.lock
index faf41fa4..cecaf240 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4121,10 +4121,10 @@ clone-response@^1.0.2:
   dependencies:
     mimic-response "^1.0.0"

-clsx@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz"
-  integrity sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==
+clsx@^2.0.0, clsx@^2.1.0:
+  version "2.1.1"
+  resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz"
+  integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==

 co@^4.6.0:
   version "4.6.0"
@@ -9902,7 +9902,7 @@ react-dev-utils@^12.0.0:
     strip-ansi "^6.0.1"
     text-table "^0.2.0"

-"react-dom@^17.0.0 || ^18.0.0", react-dom@^18.0.0, react-dom@^18.2.0, react-dom@>=16.6.0, react-dom@>=16.8, react-dom@>=16.8.0:
+"react-dom@^16.0.0 || ^17.0.0 || ^18.0.0", "react-dom@^17.0.0 || ^18.0.0", react-dom@^18.0.0, react-dom@^18.2.0, react-dom@>=16.6.0, react-dom@>=16.8, react-dom@>=16.8.0, react-dom@>=18:
   version "18.2.0"
   resolved "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz"
   integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==
@@ -10023,6 +10023,18 @@ react-scripts@5.0.0:
   optionalDependencies:
     fsevents "^2.3.2"

+react-spinners@^0.13.8:
+  version "0.13.8"
+  resolved "https://registry.npmjs.org/react-spinners/-/react-spinners-0.13.8.tgz"
+  integrity sha512-3e+k56lUkPj0vb5NDXPVFAOkPC//XyhKPJjvcGjyMNPWsBKpplfeyialP74G7H7+It7KzhtET+MvGqbKgAqpZA==
+
+react-toastify@^10.0.5:
+  version "10.0.5"
+  resolved "https://registry.npmjs.org/react-toastify/-/react-toastify-10.0.5.tgz"
+  integrity sha512-mNKt2jBXJg4O7pSdbNUfDdTsK9FIdikfsIE/yUCxbAEXl4HMyJaivrVFcn3Elvt5xvCQYhUZm+hqTIu1UXM3Pw==
+  dependencies:
+    clsx "^2.1.0"
+
 react-transition-group@^4.4.5:
   version "4.4.5"
   resolved "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz"
@@ -10033,7 +10045,7 @@ react-transition-group@^4.4.5:
     loose-envify "^1.4.0"
     prop-types "^15.6.2"

-react@*, "react@^17.0.0 || ^18.0.0", react@^18.0.0, react@^18.2.0, "react@>= 16", react@>=0.13, react@>=16, react@>=16.6.0, react@>=16.8, react@>=16.8.0:
+react@*, "react@^16.0.0 || ^17.0.0 || ^18.0.0", "react@^17.0.0 || ^18.0.0", react@^18.0.0, react@^18.2.0, "react@>= 16", react@>=0.13, react@>=16, react@>=16.6.0, react@>=16.8, react@>=16.8.0, react@>=18:
   version "18.2.0"
   resolved "https://registry.npmjs.org/react/-/react-18.2.0.tgz"
   integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==
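Closing note on the upload endpoint change in `api/app.py`: `pd.read_csv(file, on_bad_lines='skip')` makes pandas drop malformed rows instead of aborting the whole upload. A small illustration of that behaviour, sketched outside the repo with made-up data:

```python
# Illustration only (not repo code): rows with an unexpected number of fields
# are silently skipped instead of raising a ParserError.
import io
import pandas as pd

raw = "text,label\nola,pos\nlinha,quebrada,extra\nmundo,neg\n"
df = pd.read_csv(io.StringIO(raw), on_bad_lines="skip")
print(len(df))  # 2 -- the malformed middle line was dropped
```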