scheduler.py
#!/usr/bin/env python
# encoding: utf-8
import logging
import time
from logging.config import fileConfig
from os.path import dirname, join

from apscheduler.schedulers.blocking import BlockingScheduler

from blockchain import settings
from blockchain.api import BlockchainAPIClient
from blockchain.pipelines import MongoDBPipeline

# Custom logger
fileConfig(join(dirname(dirname(__file__)), 'logging.cfg'))
logger = logging.getLogger(__name__)

scheduler = BlockingScheduler()


def fetch_and_persist_data(data, *args, **kwargs):
    """
    Get and save data from the Blockchain API.

    :param str data: type of data to fetch (charts, stats, pools).
    """
    # Retrieve blockchain data
    api = BlockchainAPIClient(data)
    result = api.call(**kwargs)

    # Persist retrieved data
    logger.info('Persisting fetched data in MongoDB: %s', result)
    mongo = MongoDBPipeline.config()
    mongo.open_connection()
    mongo.persist_data(result.response)
    mongo.close_connection()
    logger.info('Data successfully persisted.')

    # Pause briefly between consecutive API calls
    time.sleep(2)
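

# Example (hypothetical values): the helper above can also be called directly,
# outside the scheduler, e.g. from an interactive session. 'market-price' is
# only an illustrative chart name; the real list comes from settings.CHARTS.
#
#   fetch_and_persist_data('charts', chart='market-price', timespan='all')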


@scheduler.scheduled_job(id='charts', trigger='cron', day_of_week='mon-sun', hour=0)
def charts_job():
    """
    Get and save blockchain charts data from the Blockchain API.
    """
    for chart in settings.CHARTS:
        logger.info('Fetching %s chart data.', chart)
        fetch_and_persist_data('charts', chart=chart, timespan='all')


@scheduler.scheduled_job(id='stats', trigger='cron', day_of_week='mon-sun', hour=0)
def stats_job():
    """
    Get and save blockchain stats data from the Blockchain API.
    """
    logger.info('Fetching blockchain statistical data.')
    fetch_and_persist_data('stats')


@scheduler.scheduled_job(id='pools', trigger='cron', day_of_week='mon-sun', hour=0)
def pools_job():
    """
    Get and save blockchain pools data from the Blockchain API.
    """
    logger.info('Fetching bitcoin mining pools data.')
    fetch_and_persist_data('pools', timespan='5days')


# Start queueing jobs
scheduler.start()
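

# Note: BlockingScheduler.start() blocks the calling thread until the process is
# interrupted (e.g. with Ctrl+C), so this module is meant to be run directly as a
# script, for example (assuming logging.cfg sits one directory above this file):
#
#   python scheduler.py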