Commit

Merge branch 'main' of github.com:TailUFPB/LinguifAI
tahaluh committed Mar 28, 2024
2 parents 5010144 + 5e3056b commit 381109c
Showing 7 changed files with 324 additions and 20 deletions.
23 changes: 19 additions & 4 deletions api/DataProcesser.py
@@ -1,3 +1,4 @@

from NbNewsModel import news_prediction
from NbEmotionsModel import make_prediction
from NbLinRegressionModel import make_prediction_nblin
@@ -11,6 +12,7 @@
import re
import joblib
import string
import os

import nltk
from nltk.corpus import stopwords
@@ -27,7 +29,7 @@ def handle_classify(self, df, classifier):
model_name = classifier_switcher[classifier]
if model_name.endswith('.pkl'):
return self.pretrained_predict(df, model_name)
elif model_name.endswith('.keras'):
elif model_name.endswith('.h5'):
return self.trained_predict(df, model_name)
#classifier_switcher = {
# 0: self.classify_emotions,
@@ -49,7 +51,7 @@ def generate_statistics(self, df):
return statistics

def preprocess_text(self, text):
text = text.lower()
text = str(text).lower()
text = re.sub('\[.*?\]', '', text)
text = re.sub("\\W", " ", text)
text = re.sub('https?://\S+|www\.\S+', '', text)
@@ -82,9 +84,19 @@ def pretrained_predict(self, df, model_name):
df['output_column'] = predictions
return df

def load_weights_and_model(self, name):
model_filename = f"api/models/{name}"
num_classes = name[: name.index("-")]  # assumes a "<num_classes>-Trained-Model-..." file name
model = tf.keras.Sequential([
tf.keras.layers.Embedding(input_dim=20000, output_dim=128),
tf.keras.layers.LSTM(64),
tf.keras.layers.Dense(int(num_classes), activation='softmax')
])
model.load_weights(model_filename)
return model

def trained_predict(self, df, model_name):
model_file = f'api/models/{model_name}'
model = load_model(model_file)
model = self.load_weights_and_model(model_name)

encoder_re = r'Trained-Model-(.*?).h5'  # match the .h5 checkpoints that handle_classify now routes here
encoder_name = re.search(encoder_re, model_name).group(1)
@@ -102,4 +114,7 @@ def trained_predict(self, df, model_name):
df['output_column'] = predicted_labels

return df


##TODO methods for classification processing
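
A minimal standalone sketch of the weight-restoring path that load_weights_and_model introduces, assuming checkpoints follow a "<num_classes>-Trained-Model-<encoder>.h5" naming convention; the file name used below is hypothetical.

# Sketch only: rebuild the same layer stack and restore weights from an .h5 checkpoint.
import tensorflow as tf

def build_classifier(num_classes: int) -> tf.keras.Model:
    # Same architecture as load_weights_and_model(): embedding -> LSTM -> softmax head
    return tf.keras.Sequential([
        tf.keras.layers.Embedding(input_dim=20000, output_dim=128),
        tf.keras.layers.LSTM(64),
        tf.keras.layers.Dense(num_classes, activation="softmax"),
    ])

name = "4-Trained-Model-demo.h5"              # hypothetical checkpoint name
num_classes = int(name.split("-")[0])         # class count assumed to prefix the file name
model = build_classifier(num_classes)
model(tf.zeros((1, 10), dtype=tf.int32))      # run a dummy batch so the variables exist
model.load_weights(f"api/models/{name}")      # weights only; the architecture must match exactly

Because only the weights are saved, the Sequential stack has to be rebuilt exactly as it was at training time, which is why the layer sizes are hard-coded both here and in the diff.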

4 changes: 3 additions & 1 deletion api/Neural_Network2.py
@@ -17,7 +17,7 @@
dirname = os.path.dirname(__file__)

def preprocess_text(text):
text = text.lower()
text = str(text).lower()
text = re.sub('\[.*?\]', '', text)
text = re.sub("\\W", " ", text)
text = re.sub('https?://\S+|www\.\S+', '', text)
@@ -84,6 +84,8 @@ def create_and_train_model(train_texts, train_labels, name, epochs=5, batch_size
joblib.dump(label_encoder, label_mapping_file)

tfidf_vectorizer = TfidfVectorizer(max_features=20000)

train_texts = [preprocess_text(text) for text in train_texts]
train_texts_tfidf = tfidf_vectorizer.fit_transform(train_texts)

# Create a text dataset using the TensorFlow datasets API
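A minimal sketch of why the two changes above matter, using made-up example rows: casting to str before .lower() keeps NaN cells from a pandas column from crashing preprocessing, and the cleaned texts are what the TfidfVectorizer is fitted on.

# Sketch only: tolerate missing values, clean the text, then fit TF-IDF on the result.
import re

import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer

def preprocess_text(text):
    text = str(text).lower()                            # NaN / non-string cells become plain strings
    text = re.sub(r"\[.*?\]", "", text)                 # drop bracketed fragments
    text = re.sub(r"https?://\S+|www\.\S+", "", text)   # drop URLs
    text = re.sub(r"\W", " ", text)                     # keep word characters only
    return text.strip()

train_texts = pd.Series(["Visit https://example.com NOW!", float("nan"), "[meta] hello world"])
cleaned = [preprocess_text(text) for text in train_texts]

tfidf_vectorizer = TfidfVectorizer(max_features=20000)
train_texts_tfidf = tfidf_vectorizer.fit_transform(cleaned)  # sparse matrix, shape (n_samples, n_features)
print(train_texts_tfidf.shape)

In this sketch the URL pattern runs before the \W pass so it can still match the raw URL text.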
6 changes: 5 additions & 1 deletion api/app.py
@@ -97,13 +97,17 @@ def train_model():
def get_training_status():
try:
with open('training_progress.json', 'r') as file:
data = json.load(file)
try:
data = json.load(file)
except json.decoder.JSONDecodeError:
return jsonify({'training_in_progress': True, 'training_progress': 0})
training_status = data.get('training_in_progress', False)
progress = data.get('training_progress', 0)
return jsonify({'training_in_progress': training_status, 'training_progress': progress})
except FileNotFoundError:
return jsonify({'training_in_progress': False, 'training_progress': 0})


#@app.teardown_appcontext
#def teardown_appcontext(error=None):
#shutdown_server()
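The nested try/except added above exists because the status file can be read while the training job is rewriting it. A compact sketch of the same fallback logic outside Flask (the file name is taken from the diff):

# Sketch only: read the progress file defensively, covering both failure modes.
import json

def read_training_status(path: str = "training_progress.json") -> dict:
    try:
        with open(path, "r") as file:
            try:
                data = json.load(file)
            except json.JSONDecodeError:
                # File exists but is mid-write: report training as running, 0% progress
                return {"training_in_progress": True, "training_progress": 0}
    except FileNotFoundError:
        # No file yet: nothing has been trained
        return {"training_in_progress": False, "training_progress": 0}
    return {
        "training_in_progress": data.get("training_in_progress", False),
        "training_progress": data.get("training_progress", 0),
    }

print(read_training_status())

Returning training_in_progress: True on a decode error matches the diff: a half-written file is taken as evidence that the trainer is still running.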
161 changes: 161 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion package.json
@@ -30,7 +30,9 @@
"typescript": "^4.9.5"
},
"scripts": {
"start": "react-scripts start",
"start": "concurrently \"npm run start:react\" \"npm run start:flask\"",
"start:react": "react-scripts start",
"start:flask": "python api/app.py",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
@@ -55,6 +57,7 @@
]
},
"devDependencies": {
"concurrently": "^8.2.2",
"electron": "^26.1.0",
"tailwindcss": "^3.3.3"
}
46 changes: 39 additions & 7 deletions src/pages/train.tsx
@@ -46,15 +46,47 @@ export default function Train() {

console.log(sendData);

const response = await axios
.post("http://localhost:5000/neural-network", sendData)
.catch((error) => {
console.error(error.response.data);
});

if (response && response.data) {
console.log(response.data);
const maxRetries = 3;
let retryCount = 0;


const url = "http://localhost:5000/neural-network";


async function postData(url: string, data: { data: any[]; label: any[]; batch_size: number; epochs: number; learning_rate: number; name: string; }) {
try {
await axios.post(url, data);
} catch (error) {
if (retryCount < maxRetries) {
retryCount++;
console.error(`Error occurred, retrying (attempt ${retryCount})...`);
await postData(url, data); // Retry recursively and await the result
} else {
console.error("Max retry limit reached. Unable to post data.");
throw error; // Throw the error after maximum retries
}
}
}

await postData(url, sendData).catch((error) => {
  console.error(error?.response?.data ?? error);
});

setIsLoading(false);
};
