chat.py
import random
import json
import torch
from model import Net
from utils import DataProcess
#################################################################################################
# Load the trained model and its preprocessing metadata                                        #
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
with open("../MIKOBOT/intents.json", "r", encoding="utf-8") as f:
    intents = json.load(f)
FILE = "data.pth"
data = torch.load(FILE)
preprocess_dataset = DataProcess()
input_size = data["input_size"]
hidden_size = data["hidden_size"]
output_size = data["ouput_size"]  # key name kept exactly as saved in the training checkpoint
all_words = data["all_words"]
tags = data["tags"]
model_state = data["model_state"]
model = Net(input_size, hidden_size, output_size).to(device)
model.load_state_dict(model_state)
model.eval()
#########################################################################################################
def chat(sentence):
    """
    chat:
        Runs the chatbot inference for a single message.
    Arguments:
        sentence (str): the user's message.
    Returns:
        str: the bot's reply.
    """
    bot_name = "MikoBOT"
    # Tokenize the message and build its bag-of-words vector.
    tokens = preprocess_dataset.tokenize(sentence)
    x = preprocess_dataset.bag_word(tokens, all_words)
    x = x.reshape(1, x.shape[0])
    x = torch.from_numpy(x).float().to(device)
    # Forward pass: pick the most likely tag and its softmax probability.
    output = model(x)
    _, predictions = torch.max(output, dim=1)
    tag = tags[predictions.item()]
    probs = torch.softmax(output, dim=1)
    prob = probs[0][predictions.item()]
    # Answer only when the model is reasonably confident; otherwise fall back.
    if prob.item() > 0.75:
        for intent in intents["intents"]:
            if tag == intent["tag"]:
                return f"{bot_name}: {random.choice(intent['responses'])}"
    return f"{bot_name}: Aun estoy aprendiendo su lenguaje..."