data_update_hospitalization.yml
name: data_hospitalization.json update

on:
  schedule:
    # Run every 8 hours
    - cron: '0 */8 * * *'
  # Allow the workflow to be triggered manually from the Actions tab
  workflow_dispatch: {}

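# NOTE: GitHub Actions evaluates cron schedules in UTC, and scheduled runs can
# start several minutes after the listed time when runners are busy.
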
jobs:
  # This workflow contains a single job called "build"
  build:
    runs-on: ubuntu-latest
    steps:
      # Checkout the main repo
      - name: Checkout Main repo
        uses: actions/checkout@v2
        with:
          ssh-key: ${{ secrets.data_daily_update }}
          path: site

      # Checkout data scraper repo
      - name: Checkout Data Scraper
        uses: actions/checkout@v2
        with:
          ssh-key: ${{ secrets.data_daily_update }}
          repository: sfbrigade/data-covid19-sfbayarea
          path: scraper

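      # Both checkouts authenticate with the deploy key above; actions/checkout
      # persists it in each checkout's local git config, which is also what
      # lets the final step's "git push" authenticate.
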
      # The scraper uses Python 3.7+, so make sure we've got the latest 3.x
      - name: Set up Python 3.x
        uses: actions/setup-python@v1
        with:
          python-version: '3.x'

      - name: Cache Python Dependencies
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          # NOTE: we can hash these requirements files together, but keeping
          # them separate allows us to load a partial cache if only the dev
          # requirements have changed.
          key: ${{ runner.os }}-pip-${{ hashFiles('scraper/requirements.txt') }}-${{ hashFiles('scraper/requirements-dev.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-${{ hashFiles('scraper/requirements.txt') }}-
            ${{ runner.os }}-pip-

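      # When the exact key above misses, the restore-keys are tried in order as
      # key prefixes, so a close-but-stale cache (e.g. when only the dev
      # requirements changed) can still be restored.
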
      # Install dependencies.
      # The commit that was checked out will be available as $SCRAPER_COMMIT.
      - name: Install libxml2-dev libxslt-dev
        run: sudo apt-get install -y libxml2-dev libxslt-dev
      - name: Install Data Scraper & Dependencies
        run: |
          cd ${GITHUB_WORKSPACE}/scraper
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          # Keep track of the version used so we can use it in commit messages
          echo "SCRAPER_COMMIT=$(git rev-parse HEAD)" >> $GITHUB_ENV

      - name: Scrape Data
        run: |
          echo "SCRAPER_TIME=$(date)" >> $GITHUB_ENV
          cd ${GITHUB_WORKSPACE}/scraper
          python scraper_hospital_data.py > ${GITHUB_WORKSPACE}/site/data/data_hospitalization.json

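      # The scraper writes JSON to stdout, so the redirect above lands the
      # fresh data directly in the site checkout for the next step to commit.
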
      - name: Commit Changes
        run: |
          cd ${GITHUB_WORKSPACE}/site
          git config user.name "${{ secrets.githubaction_config_user_name }}"
          git config user.email "${{ secrets.githubaction_config_user_email }}"
          git add data/data_hospitalization.json
          git commit -F - << EOF
          GitHubAction: hospitalization data update

          Created with commit ${{ env.SCRAPER_COMMIT }} from sfbrigade/data-covid19-sfbayarea
          https://github.com/sfbrigade/data-covid19-sfbayarea/commit/${{ env.SCRAPER_COMMIT }}
          EOF
          git push
          echo 'Git commit and push completed for the hospitalization data update.'
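      # NOTE: "git commit" exits non-zero when there is nothing to commit, so a
      # run in which the scraped data is unchanged will fail this job. One
      # possible guard (an assumption, not part of this workflow as written)
      # would be to skip the commit when the index is clean:
      #   git diff --cached --quiet || git commit -F - << EOF ...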