-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathalert_on_new_query_results.py
executable file
·142 lines (114 loc) · 4.26 KB
/
alert_on_new_query_results.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
#!/usr/bin/env python3
import argparse
import json
import logging
import logging.config
import os
import expertvoice_client
APP_NAME = "expertvoice_alert_on_new_query_results"
# TODO overhaul seen listings method
# instead of being date based, we should instead just pull product IDs
# if it's in seen listings, don't alert on it (but track it)
# at end of execution, write all results (ignoring seen listings) to seen_listings file
def main():
    """Run saved ExpertVoice product queries and log alerts for unseen listings.

    Loads a JSON config file, executes one saved query (``-q``) or all of
    them (``--all``), logs any listing whose product code is not already in
    the seen-listings file, then rewrites that file with the listings
    observed on this run (so anything no longer returned ages out).
    """
    parser = argparse.ArgumentParser()
    # Not required=True here: --list-queries must be usable on its own.
    # The -q/--all requirement is enforced manually after the -l early exit.
    query_group = parser.add_mutually_exclusive_group()
    query_group.add_argument(
        # BUG FIX: original declared nargs=1, which makes args.query_name a
        # one-element *list* — an unhashable dict key that broke the
        # single-query lookup below with a TypeError.
        "-q", "--query-name", type=str, help="The name of the query to execute"
    )
    query_group.add_argument(
        "--all",
        action="store_true",
        help="If set, execute all queries for the configured data source",
    )
    parser.add_argument(
        "-l",
        "--list-queries",
        action="store_true",
        help="If set, list all queries that can be executed "
        "for the current data source and exit",
    )
    parser.add_argument(
        "--markdown",
        action="store_true",
        help="If set, log URLs in markdown format (for gotify)",
    )
    parser.add_argument(
        "--config",
        type=str,
        default="./config.json",
        help="The path to a configuration file to use. Defaults to ./config.json",
    )
    args = parser.parse_args()
    # load config
    with open(args.config) as f:
        config = json.load(f)
    # logging setup; fall back to a bare version-1 config if none provided
    logging.config.dictConfig(config.get("logging", {"version": 1}))
    logger = logging.getLogger(APP_NAME)
    if args.list_queries:
        print("Saved queries: %s" % (", ".join(sorted(config["saved_queries"].keys()))))
        return
    # enforce the query selection requirement now that -l has been handled
    if not args.all and args.query_name is None:
        parser.error("one of the arguments -q/--query-name --all is required")
    # init seen listings: maps product code (str) -> "" for items already alerted on
    seen_listings_filename = config.get("seen_listings_filename", "seen_listings.json")
    if os.path.isfile(seen_listings_filename):
        with open(seen_listings_filename, "r") as f:
            seen_listings = json.load(f)
    else:
        seen_listings = dict()
    if args.all:
        queries_to_run = config["saved_queries"]
    else:
        if args.query_name not in config["saved_queries"]:
            # friendlier than the raw KeyError the bare lookup would raise
            parser.error(
                f"unknown query {args.query_name!r}; use -l to list saved queries"
            )
        queries_to_run = {args.query_name: config["saved_queries"][args.query_name]}
    ev = expertvoice_client.ExpertvoiceClient(config)
    new_seen_listings = dict()
    for query_name, query_json in queries_to_run.items():
        query_res = ev.search_products(**query_json)
        alert_queue = list()
        for listing in query_res:
            item_id = str(listing["productCode"])
            # track every result so the rewritten file reflects this run
            new_seen_listings[item_id] = ""
            # skip seen listings
            if item_id in seen_listings:
                continue
            listing["url"] = ev.get_product_url(
                listing["orgId"], listing["productCode"]
            )
            alert_queue.append(listing)
        if alert_queue:
            formatted_msg_lines = [
                f'{len(alert_queue)} new results for ExpertVoice query "{query_name}"',
                "",
            ]
            for alert in alert_queue:
                if args.markdown:
                    alert_lines = [
                        f"[{alert['brand']} - {alert['name']}]({alert['url']}):",
                        "",
                        f"price: {alert['price']}, msrp: {alert['msrp']}",
                        "",
                    ]
                else:
                    alert_lines = [
                        f"{alert['brand']} - {alert['name']}:",
                        f"price: {alert['price']}, msrp: {alert['msrp']}",
                        alert["url"],
                        "",
                    ]
                formatted_msg_lines.extend(alert_lines)
            logger.info("\n".join(formatted_msg_lines))
    # save new results of seen listings: anything not returned this run is
    # dropped implicitly because only new_seen_listings is persisted
    with open(seen_listings_filename, "w") as f:
        json.dump(new_seen_listings, f)
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()