forked from adityadutta/MHacks12
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathGoogle_Key_Test.py
158 lines (124 loc) · 4.88 KB
/
Google_Key_Test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
from app import DatabaseManager, Note
class NoteAnalysis:
    """Merge two stored notes into a single "super note".

    Reads the text of two notes from the database, asks the Google Cloud
    Natural Language API for salient entities (keywords), keeps only the
    sentences mentioning a keyword, fuzzily de-duplicates those sentences
    across both notes, writes the merged note back to the ``super_notes``
    store, and returns the merged text.
    """

    # Minimum entity salience for an entity name to count as a keyword.
    SALIENCE_THRESHOLD = 0.015

    def __init__(self, note1_key, note2_key, note_id=2, subject="physics"):
        """
        :param note1_key: key of the existing super note (read from the
            ``super_notes`` store).
        :param note2_key: key of the new note to merge in (read from the
            ``notes`` store).
        :param note_id: id assigned to the merged note written back.
        :param subject: subject label assigned to the merged note.
        """
        self.note1_key = note1_key
        self.note2_key = note2_key
        self.note_id = note_id
        self.subject = subject

    @staticmethod
    def _split_sentences(text):
        """Split *text* on '.' and drop one leading space per sentence.

        The original code tried to strip the space by rebinding the loop
        variable, which had no effect; this actually applies the strip.
        """
        return [s[1:] if s[:1] == " " else s for s in text.split(".")]

    @staticmethod
    def _keyword_sentences(sentences, keywords):
        """Return, in order and without duplicates, the sentences that
        contain at least one keyword (plain substring match)."""
        picked = []
        for sentence in sentences:
            # Exact-match dedupe; the old substring-in-joined-string check
            # wrongly skipped sentences contained inside longer kept ones.
            if sentence not in picked and any(
                    sentence.find(kw) > -1 for kw in keywords):
                picked.append(sentence)
        return picked

    @staticmethod
    def _merge_unique(merged, candidates, max_ratio, ratio):
        """Append each candidate to *merged* in place unless it is too
        similar (ratio > *max_ratio*) to any sentence already kept.

        :param ratio: similarity function, e.g. ``fuzz.token_set_ratio``.
        """
        for candidate in candidates:
            if all(ratio(candidate, kept) <= max_ratio for kept in merged):
                merged.append(candidate)

    def run_quickstart(self):
        """Build, persist, and return the merged note text."""
        # Third-party / project imports are deferred (as in the original)
        # so the module can be imported without these packages installed.
        from fuzzywuzzy import fuzz
        # [START language_quickstart]
        # Imports the Google Cloud client library
        # [START language_python_migration_imports]
        from google.cloud import language
        from google.cloud.language import enums
        from google.cloud.language import types
        # [END language_python_migration_imports]
        from database import DatabaseManager, Note

        data_in = DatabaseManager("notes")
        data_out = DatabaseManager("super_notes")
        # Instantiates a client
        # [START language_python_migration_client]
        client = language.LanguageServiceClient()
        # [END language_python_migration_client]

        # NOTE(review): note1 is fetched from the super-note store and note2
        # from the regular note store — presumably intentional; confirm.
        text1 = data_out.get_note_key(self.note1_key)["note"]
        text2 = data_in.get_note_key(self.note2_key)["note"]

        sentence_lists = []
        keyword_lists = []
        for text in (text1, text2):
            document = types.Document(
                content=text,
                type=enums.Document.Type.PLAIN_TEXT)
            response = client.analyze_entities(
                document=document, encoding_type='UTF32')
            keywords = []
            for entity in response.entities:
                if entity.salience > self.SALIENCE_THRESHOLD:
                    keywords.append(entity.name)
                    # Debug output retained from the original quickstart.
                    print('=' * 20)
                    print('name: {0}'.format(entity.name))
                    print('type: {0}'.format(entity.type))
                    print('metadata: {0}'.format(entity.metadata))
                    print('salience: {0}'.format(entity.salience))
            sentence_lists.append(self._split_sentences(text))
            keyword_lists.append(keywords)

        print("Keys 1:", keyword_lists[0])
        print("Keys 2:", keyword_lists[1])

        key_sentences_1 = self._keyword_sentences(
            sentence_lists[0], keyword_lists[0])
        key_sentences_2 = self._keyword_sentences(
            sentence_lists[1], keyword_lists[1])

        # Seed with a blank entry so the first candidate has something to be
        # compared against.  Sentences from note1 are dropped only when
        # near-identical to a kept one (> 80); note2 sentences face a much
        # stricter bar (> 50) so mostly-new material survives.
        merged = [" "]
        self._merge_unique(merged, key_sentences_1, 80, fuzz.token_set_ratio)
        self._merge_unique(merged, key_sentences_2, 50, fuzz.token_set_ratio)

        merged_text = "\n".join(merged) + "\n"

        # Persist the merged note back to the super-note store.
        super_note = Note(self.note_id, self.subject, merged_text)
        data_out.add_note_to_db(super_note)
        return merged_text
# n = NoteAnalysis("-LqyiulvtclaFSFsC4_Q", "-Lqyl7NHN9vWsMeJYBIM")