Update Thailand Covid Data #9405
name: Update Thailand Covid Data
on:
  push:
    branches:
      - main
  pull_request:
    branches-ignore:
      - "releases/**"
  schedule: # can be up to 20 min late, so better to have more updates
# - cron: "*/19 23 * * *" # Quick checks for briefing after 7am | |
# - cron: "55 23 * * SUN" # after midnight is delayed | |
- cron: "00 06 * * MON,WED" # 13:00 Dashboard | |
# - cron: "*/15 00-02 * * *" # Quick checks for briefing after 7am (but often after 8:30am) | |
# - cron: "45 23 * * *" # 07:15 Dashboard | |
# - cron: "02 00 * * *" # 08:02 Dashboard | |
# - cron: "25 00 * * *" # 08:45 Dashboard | |
# - cron: "25 05 * * *" # 12:25 daily briefing - official tweet | |
# - cron: "*/59 06-09 * * MON" # Quick checks for briefing after 1pm | |
# - cron: "00 06 * * *" # 13:00 daily briefing - briefing pdf | |
# - cron: "40 06 * * *" # 13:40 daily briefing - briefing pdf | |
# - cron: "50 07 * * *" # 14:50 in case briefing was uploaded late - #TODO: currently even later | |
# - cron: "20 10 * * *" # 17:20 new PUI number | |
# - cron: "20 14 * * *" # 23:20 test data/situation report/vaccination report | |
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    # permissions:
    #   statuses: write
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: recursive
      # - name: Get Cached docs
      #   uses: actions/checkout@v2
      #   with:
      #     repository: djay/covid_thailand_inputs
      #     path: inputs
      #     lfs: true
      - uses: szenius/set-timezone@v1.0
        with:
          timezoneLinux: "Asia/Bangkok"
      - name: Get current date
        id: date
        run: |
          echo "::set-output name=date::$(TZ=Asia/Bangkok date +'%Y-%m-%d')"
          echo "::set-output name=yesterday::$(TZ=Asia/Bangkok date --date=yesterday +'%Y-%m-%d')"
          echo "::set-output name=month::$(TZ=Asia/Bangkok date +'%Y-%m')"
          echo "::set-output name=year::$(TZ=Asia/Bangkok date +'%Y')"
          echo "::set-output name=hour::$(TZ=Asia/Bangkok date +'%H')"
          echo "::set-output name=time::$(TZ=Asia/Bangkok date +'%H:%M')"
          echo "::set-output name=briefingurl::http://media.thaigov.go.th/uploads/public_img/source/$(TZ=Asia/Bangkok date +'%d%m')$(($(TZ=Asia/Bangkok date +'%Y')-1957)).pdf"
          echo "::set-output name=briefingfile::inputs/briefings/$(TZ=Asia/Bangkok date +'%d%m')$(($(TZ=Asia/Bangkok date +'%Y')-1957)).pdf"
      - name: Check briefing doc available
        id: briefing
        continue-on-error: true
        uses: jtalk/url-health-check-action@v1.5
        with:
          url: ${{ steps.date.outputs.briefingurl }}
          follow-redirect: yes
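      # The next check hits the DDC covid19 API and greps for today's date; the grep exits non-zero
      # when today's data is not published yet, and continue-on-error lets later steps branch on
      # steps.api.outcome instead of failing the whole job.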
      - name: Check api available
        id: api
        continue-on-error: true
        run: |
          (curl -sL https://covid19.ddc.moph.go.th/api/Cases/today-cases-by-provinces | grep ${{ steps.date.outputs.date }})
      - name: api available or it's a push
        id: goready
        if: ${{ (github.event_name != 'schedule' || steps.api.outcome == 'success') }}
        run: |
          echo "'${{ github.event_name }}' (github.event_name) != 'schedule' OR"
          echo "'${{ steps.api.outcome }}' (steps.api.outcome) == 'success'"
      - name: Get release datasets
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "datasets.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract datasets
        run: |
          tar -xzvf datasets.tar.gz
          rm datasets.tar.gz
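      # The two "not done yet" checks below use a negated grep: they succeed only when today's date is
      # still missing from the extracted CSVs (API cases by province, MOPH dashboard by province),
      # i.e. when there is actually something new to scrape.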
      - name: Api not done yet
        id: goapi
        if: ${{ steps.api.outcome == 'success' }}
        continue-on-error: true
        run: (! grep ${{ steps.date.outputs.date }} api/cases_by_province.csv )
      - name: Dash not done yet
        id: dash
        # if: ${{ steps.api.outcome == 'success' && steps.goapi.outcome != 'success' }}
        continue-on-error: true
        run: (! grep ${{ steps.date.outputs.date }} api/moph_dashboard_prov.csv )
      - name: Cache multiple paths
        id: cache
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: pat-s/always-upload-cache@v2.1.5
        with:
          path: |
            ~/.cache/pip
            ${{ env.pythonLocation }}
            /tmp/tika-server.jar
            /tmp/tika-server.jar.md5
          # The key is evaluated only at the start, so there is no way to base it on the files downloaded
          # without checking them all first. Using the time is a hack that causes the cache to be saved on every run.
          # see https://github.com/actions/cache/issues/135
          key: ${{ runner.os }}-${{ steps.date.outputs.date }}-${{ steps.date.outputs.time }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.date.outputs.date }}
            ${{ runner.os }}-${{ steps.date.outputs.yesterday }}
            ${{ runner.os }}-${{ steps.date.outputs.month }}
            ${{ runner.os }}-${{ steps.date.outputs.year }}
            ${{ runner.os }}
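      # restore-keys are prefix-matched in the order listed, so the freshest cache wins: today's, then
      # yesterday's, then anything from this month, this year, and finally any cache for this OS.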
      - name: Get release with cached docs
        id: docsrelease
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "inputs.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Get release with cached briefings
        id: briefingsrelease
        continue-on-error: true
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "briefings.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Get release with cached situation
        continue-on-error: true
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "situation.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Get release with cached vaccinations
        continue-on-error: true
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "vaccinations.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Get release with cached testing
        continue-on-error: true
        if: ${{ steps.goready.outcome == 'success' || steps.dash.outcome == 'success' || steps.briefing.outcome == 'success' }}
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          file: "testing.tar.gz"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract cached docs # TODO: should this be before or after the proper cache? Or conditional on the cache being empty?
        if: ${{ steps.docsrelease.outcome == 'success' }}
        run: |
          tar -xzvf inputs.tar.gz
          tar -xzvf briefings.tar.gz || true
          tar -xzvf situation.tar.gz || true
          tar -xzvf vaccinations.tar.gz || true
          tar -xzvf testing.tar.gz || true
          rm inputs.tar.gz briefings.tar.gz situation.tar.gz vaccinations.tar.gz testing.tar.gz || true
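      # Only inputs.tar.gz is required (the step is gated on docsrelease); the other archives are
      # optional, so each tar/rm is suffixed with "|| true" to keep a missing bundle from failing the job.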
      - name: du
        if: ${{ steps.docsrelease.outcome == 'success' }}
        run: |
          du -ha * | sort -h
      - name: go if briefing ready
        id: gofull
        if: ${{ github.event_name != 'schedule' || (steps.briefing.outcome == 'success' && hashFiles(steps.date.outputs.briefingfile) == '') }}
        run: |
          echo "'${{ github.event_name }}' (github.event_name) != 'schedule' OR"
          echo "('${{ steps.briefing.outcome }}' (steps.briefing.outcome) == 'success' AND"
          echo " '${{ hashFiles(steps.date.outputs.briefingfile) }}' (hashFiles(briefingfile)) == '')"
      - name: Apt update
        if: ${{ steps.gofull.outcome == 'success' || steps.goapi.outcome == 'success' || steps.dash.outcome == 'success' }}
        id: dep_apt
        run: |
          sudo apt-get update
          sudo apt-get install -y openvpn unzip ghostscript python3-tk
          # sudo apt-get update -y --ignore-missing || true
        # uses: awalsh128/cache-apt-pkgs-action@latest
        # with:
        #   packages: openvpn unzip ghostscript python3-tk
        #   version: 1.7
        #   execute_install_scripts: true
      - name: Set up Python
        id: goscrape
        if: ${{ steps.gofull.outcome == 'success' || steps.goapi.outcome == 'success' || steps.dash.outcome == 'success' }}
        uses: actions/setup-python@v2
        with:
          python-version: 3.9.15
          cache: 'pip'
      - uses: actions/setup-java@v1
        if: ${{ steps.goscrape.outcome == 'success' }}
        with:
          java-version: 15
      - name: Install dependencies
        if: ${{ steps.goscrape.outcome == 'success' }}
        id: dep
        env:
          TIKA_VERSION: 2.5.0
        run: |
          python3 -m pip install --upgrade pip
          pip install wheel
          pip install -r requirements.txt
          python -c 'from ctypes.util import find_library; print(find_library("gs"))'
          # Prep tika so it downloads a new server version outside of the VPN
          tika-python config mime-types || true
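      # Running any tika-python command here makes it fetch the Tika server jar into /tmp, the same
      # /tmp/tika-server.jar path saved and restored by the "Cache multiple paths" step above.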
      # - name: Setup VPN
      #   id: setup-vpn
      #   run: |
      #     sudo wget https://my.surfshark.com/vpn/api/v1/server/configurations
      #     sudo unzip configurations
      #     (echo -e "${{ secrets.OVPN_USERNAME }}\n${{ secrets.OVPN_PASSWORD }}") > userpass.txt
      #     sudo openvpn --config th-bkk.prod.surfshark.com_tcp.ovpn --daemon --auth-user-pass userpass.txt
      #     rm userpass.txt
      #     # Wait for OpenVPN to connect
      #     sleep 5
      #     tail -30 /var/log/syslog
      #     while ! sudo pgrep openvpn > /dev/null; do
      #       sleep 1
      #     done
      #     while ! sudo grep -q "Initialization Sequence Completed" /var/log/syslog; do
      #       sleep 1
      #     done
      #     # Do something after OpenVPN connects
      #     echo "OpenVPN connected"
      - name: is dash ready
        id: godash
        if: ${{ steps.dep.outcome == 'success' && steps.dash.outcome == 'success' && github.event_name == 'schedule' }}
        continue-on-error: true
        # run: python -c "import covid_data_dash; covid_data_dash.check_dash_ready()"
        run: true
      - name: Scrape Covid Data API
        id: updateapi
        if: ${{ steps.gofull.outcome != 'success' && steps.godash.outcome != 'success' && steps.goapi.outcome == 'success' }}
        run: |
          # sudo docker run -t -p 8118:8118 -p 9050:9050 -l "TH" -d dperson/torproxy
          python covid_data_api.py
        env:
          # TIKA_VERSION: 1.24 # Prevent delays in upgrades
          DRIVE_API_KEY: ${{ secrets.DRIVE_API_KEY }}
      - name: Scrape Covid Data
        id: update
        if: ${{ steps.gofull.outcome == 'success' || steps.godash.outcome == 'success' || github.event_name != 'schedule' }}
        run: |
          python covid_plot.py
        env:
          # TIKA_VERSION: 1.24 # Prevent delays in upgrades
          DRIVE_API_KEY: ${{ secrets.DRIVE_API_KEY }}
      - name: Run tests # Run afterwards since no new files get downloaded, which makes it quicker
        if: ${{ github.event_name != 'schedule' }}
        run: |
          # sudo docker run -t -p 8118:8118 -p 9050:9050 -l "TH" -d dperson/torproxy
          pytest
        env:
          # TIKA_VERSION: 1.24 # Prevent delays in upgrades
          DRIVE_API_KEY: ${{ secrets.DRIVE_API_KEY }}
      - name: outputfiles.txt
        continue-on-error: true
        run: |
          du inputs | tee outputfiles.txt
      # - name: Stop VPN
      #   id: stop-vpn
      #   run: |
      #     sudo pkill -f "openvpn"
      - id: can-deploy
        env:
          MY_KEY: ${{ secrets.NETLIFY_AUTH_TOKEN }}
        if: "${{ env.MY_KEY != '' }}"
        run: echo "::set-output name=key::true"
      # - name: Push cached docs to repo
      #   run: |
      #     cd inputs
      #     git config user.name github-actions
      #     git config user.email github-actions@github.com
      #     git add .
      #     git commit -m "generated"
      #     git push
      - uses: ruby/setup-ruby@v1
        id: gonetlify
        if: ${{ (steps.update.outcome == 'success' || steps.updateapi.outcome == 'success') && steps.can-deploy.outputs.key == 'true' }}
        with:
          ruby-version: 2.7.2 # Not needed with a .ruby-version file
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - name: Make links relative so PRs still work
        if: ${{ steps.gonetlify.outcome == 'success' && github.ref != 'refs/heads/main' }}
        run: |
          sed -i 's#https://practical-ritchie-cca141\.netlify\.app#.#g' README.md
      - name: Jekyll Build
        if: ${{ steps.gonetlify.outcome == 'success' }}
        run: |
          bundle config set --local path vendor/bundle
          bundler install --jobs 4 --retry 3
          sed -i 's#!\[.*\](\(.*_.[^\)]*\)\.png)#<a href="\1.svg" target="_blank"><object width="100%" type="image/svg+xml" data="\1.svg"><img src="\1.png" /></object></a>#g' README.md
          sed -Ei 's#\[(.*)\](\(.*_.[^\)]*\)\.png)#<a href="\2.svg" target="_blank">\1</a>#g' README.md
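          # The seds above rewrite plot links in README.md so the built site prefers SVG with a PNG fallback.
          # With a hypothetical line "![Cases](outputs/cases_all.png)" the first sed would emit (on one line):
          #   <a href="outputs/cases_all.svg" target="_blank"><object width="100%" type="image/svg+xml" data="outputs/cases_all.svg"><img src="outputs/cases_all.png" /></object></a>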
          cp *.md docs
          cp -R assets docs/assets
          cp _config.yml docs/_config.yml
          bundle exec jekyll build --destination=./_site
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v3.x
      - name: Deploy gh-pages
        if: ${{ steps.gonetlify.outcome == 'success' && github.ref == 'refs/heads/main' }}
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs
          publish_branch: gh-pages
          enable_jekyll: true
          force_orphan: true
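      # force_orphan keeps gh-pages as a single-commit branch so the published history does not grow on
      # every deploy.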
      - name: path urls for netlify
        if: ${{ steps.gonetlify.outcome == 'success' }}
        run: |
          sed -i 's/https:\/\/practical-ritchie-cca141.netlify.app//g' README.md
          sed -i 's/https:\/\/practical-ritchie-cca141.netlify.app//g' downloads.md
      - name: Deploy to Netlify
        if: ${{ steps.gonetlify.outcome == 'success' }}
        id: netlify
        uses: nwtgck/actions-netlify@v1.2.2
        with:
          publish-dir: './_site'
          production-branch: main
          github-token: ${{ secrets.GITHUB_TOKEN }}
          deploy-message: "Deploy from GitHub Actions"
          enable-pull-request-comment: false
          enable-commit-comment: true
          overwrites-pull-request-comment: true
        env:
          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
        timeout-minutes: 3
      # - name: Upload png to wiki
      #   if: ${{ steps.netlify.outcome == 'success' && github.ref == 'refs/heads/main' }}
      #   uses: kai-tub/external-repo-sync-action@v1
      #   with:
      #     source-directory: "outputs"
      #     include-patterns: "*.png"
      #   env:
      #     GH_ACCESS_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
      # - name: Upload json to wiki
      #   if: ${{ steps.netlify.outcome == 'success' && github.ref == 'refs/heads/main' }}
      #   uses: kai-tub/external-repo-sync-action@v1
      #   with:
      #     source-directory: "api"
      #     include-patterns: "*"
      #   env:
      #     GH_ACCESS_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
      - name: Run tests # Run afterwards since no new files get downloaded, which makes it quicker
        if: ${{ github.event_name != 'schedule' }}
        run: |
          # sudo docker run -t -p 8118:8118 -p 9050:9050 -l "TH" -d dperson/torproxy
          pytest
        env:
          TIKA_VERSION: 1.24 # Prevent delays in upgrades
          DRIVE_API_KEY: ${{ secrets.DRIVE_API_KEY }}
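      # The bundling below splits the cached files into several archives: the PDF sources (briefings,
      # situation reports, vaccination and testing docs) are tarred from explicit file lists via process
      # substitution, the remaining inputs go into inputs.tar.gz, and the generated CSVs under api/ and
      # inputs/json become datasets.tar.gz.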
      - name: Make cached docs bundle
        id: makerelease
        if: ${{ steps.netlify.outcome == 'success' && github.ref == 'refs/heads/main' }}
        run: |
          tar -czvf briefings.tar.gz -T <(\ls -1 inputs/briefings/*)
          tar -czvf situation.tar.gz -T <(\ls -1 inputs/situation_*/*)
          tar -czvf vaccinations.tar.gz -T <(\ls -1 inputs/vaccinations/*)
          tar -czvf testing.tar.gz -T <(\ls -1 inputs/testing_moph/* inputs/variants/*)
          tar --exclude="inputs/.git" --exclude="_site" --exclude="._*" --exclude="inputs/json/*.csv" --exclude="inputs/briefings/*" --exclude="inputs/situation_*/*" --exclude="inputs/vaccinations/*" --exclude="inputs/variants/*" --exclude="inputs/testing_moph/*" -czvf inputs.tar.gz inputs
          find api inputs/json -path "api/*" -o -name "*.csv" | tar -czvf datasets.tar.gz -T -
- uses: "marvinpinto/action-automatic-releases@latest" | |
if: ${{ steps.makerelease.outcome == 'success' }} | |
with: | |
repo_token: "${{ secrets.GITHUB_TOKEN }}" | |
prerelease: false | |
automatic_release_tag: ${{ steps.date.outputs.date }} | |
title: Dataset and input files for covidthailand for ${{ steps.date.outputs.date }} | |
files: | | |
briefings.tar.gz | |
situation.tar.gz | |
vaccinations.tar.gz | |
testing.tar.gz | |
datasets.tar.gz | |
inputs.tar.gz | |
      - name: Upload inputs.tar.gz to existing release
        if: ${{ steps.makerelease.outcome == 'success' }}
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: ./inputs.tar.gz
          asset_name: inputs.tar.gz
          tag: 1
          overwrite: true
          body: "Updated docs"
      - name: Upload datasets.tar.gz to existing release
        if: ${{ steps.makerelease.outcome == 'success' }}
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: ./datasets.tar.gz
          asset_name: datasets.tar.gz
          tag: 1
          overwrite: true
          body: "Updated docs"
      - name: Upload vaccinations.tar.gz to existing release
        if: ${{ steps.makerelease.outcome == 'success' }}
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: ./vaccinations.tar.gz
          asset_name: vaccinations.tar.gz
          tag: 1
          overwrite: true
          body: "Updated vaccinations"
      - name: Upload testing.tar.gz to existing release
        if: ${{ steps.makerelease.outcome == 'success' }}
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: ./testing.tar.gz
          asset_name: testing.tar.gz
          tag: 1
          overwrite: true
          body: "Updated testing"
      # - name: Upload briefings.tar.gz to existing release
      #   if: ${{ steps.makerelease.outcome == 'success' }}
      #   uses: svenstaro/upload-release-action@v2
      #   with:
      #     repo_token: ${{ secrets.GITHUB_TOKEN }}
      #     file: ./briefings.tar.gz
      #     asset_name: briefings.tar.gz
      #     tag: 1
      #     overwrite: true
      #     body: "Updated briefings"
      # - name: Upload situation.tar.gz to existing release
      #   if: ${{ steps.makerelease.outcome == 'success' }}
      #   uses: svenstaro/upload-release-action@v2
      #   with:
      #     repo_token: ${{ secrets.GITHUB_TOKEN }}
      #     file: ./situation.tar.gz
      #     asset_name: situation.tar.gz
      #     tag: 1
      #     overwrite: true
      #     body: "Updated situation"