
fix(site): center table of relations #71

Workflow file for this run

name: Scrape
on:
push:
branches: [ main ]
workflow_dispatch:
jobs:
build_binaries:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
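      # Cache Go's build cache and module download cache, keyed on go.sum so the
      # cache is refreshed whenever the module dependencies change.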
- name: cache golang artifacts
uses: actions/cache@v3
with:
path: |
~/.cache/go-build
~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
            ${{ runner.os }}-go-
- name: Set up Go
uses: actions/setup-go@v3
with:
go-version: 1.17
# TODO: lint go
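      # Every binary below follows the same three steps: restore it from a cache entry
      # keyed on the hash of its sources, rebuild it with make only on a cache miss,
      # and upload it as an artifact for the downstream observe/scrape jobs.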
### ./bin/scrape_mariadb_docs
- name: cache ./bin/scrape_mariadb_docs
uses: actions/cache@v3
id: cache-scrape_mariadb_docs
with:
path: ./bin/scrape_mariadb_docs
key: "scrape_mariadb_docs-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/mariadb/scrape_docs/*')}}"
- name: build ./bin/scrape_mariadb_docs
if: steps.cache-scrape_mariadb_docs.outputs.cache-hit != 'true'
run: make ./bin/scrape_mariadb_docs
- name: upload scrape_mariadb_docs
uses: actions/upload-artifact@v3
with:
name: scrape_mariadb_docs
path: ./bin/scrape_mariadb_docs
### ./bin/scrape_mssql_docs
- name: cache scrape_mssql_docs
uses: actions/cache@v3
id: cache-scrape_mssql_docs
with:
path: ./bin/scrape_mssql_docs
key: "scrape_mssql_docs-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/mssql/scrape_docs/*')}}"
- name: build scrape_mssql_docs
if: steps.cache-scrape_mssql_docs.outputs.cache-hit != 'true'
run: make ./bin/scrape_mssql_docs
- name: upload scrape_mssql_docs
uses: actions/upload-artifact@v3
with:
name: scrape_mssql_docs
path: ./bin/scrape_mssql_docs
### ./bin/scrape_postgres_docs
- name: cache scrape_postgres_docs
uses: actions/cache@v3
id: cache-scrape_postgres_docs
with:
path: ./bin/scrape_postgres_docs
key: "scrape_postgres_docs-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/postgres/scrape_docs/*')}}"
- name: build scrape_postgres_docs
if: steps.cache-scrape_postgres_docs.outputs.cache-hit != 'true'
run: make ./bin/scrape_postgres_docs
- name: upload scrape_postgres_docs
uses: actions/upload-artifact@v3
with:
name: scrape_postgres_docs
path: ./bin/scrape_postgres_docs
### ./bin/scrape_tidb_docs
- name: cache scrape_tidb_docs
uses: actions/cache@v3
id: cache-scrape_tidb_docs
with:
path: ./bin/scrape_tidb_docs
key: "scrape_tidb_docs-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/tidb/scrape_docs/*')}}"
- name: build scrape_tidb_docs
if: steps.cache-scrape_tidb_docs.outputs.cache-hit != 'true'
run: make ./bin/scrape_tidb_docs
- name: upload scrape_tidb_docs
uses: actions/upload-artifact@v3
with:
name: scrape_tidb_docs
path: ./bin/scrape_tidb_docs
### ./bin/observe_mariadb
- name: cache observe_mariadb
uses: actions/cache@v3
id: cache-observe_mariadb
with:
path: ./bin/observe_mariadb
key: "observe_mariadb-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/mariadb/observe/*')}}"
- name: build observe_mariadb
if: steps.cache-observe_mariadb.outputs.cache-hit != 'true'
run: make ./bin/observe_mariadb
- name: upload observe_mariadb
uses: actions/upload-artifact@v3
with:
name: observe_mariadb
path: ./bin/observe_mariadb
### ./bin/observe_mysql
- name: cache observe_mysql
uses: actions/cache@v3
id: cache-observe_mysql
with:
path: ./bin/observe_mysql
key: "observe_mysql-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/mysql/observe/*')}}"
- name: build observe_mysql
if: steps.cache-observe_mysql.outputs.cache-hit != 'true'
run: make ./bin/observe_mysql
- name: upload observe_mysql
uses: actions/upload-artifact@v3
with:
name: observe_mysql
path: ./bin/observe_mysql
### ./bin/observe_postgres
- name: cache observe_postgres
uses: actions/cache@v3
id: cache-observe_postgres
with:
path: ./bin/observe_postgres
key: "observe_postgres-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/postgres/observe/*')}}"
- name: build observe_postgres
if: steps.cache-observe_postgres.outputs.cache-hit != 'true'
run: make ./bin/observe_postgres
- name: upload observe_postgres
uses: actions/upload-artifact@v3
with:
name: observe_postgres
path: ./bin/observe_postgres
# ### ./bin/{}
# - name: cache {}
# uses: actions/cache@v3
# id: cache-{}
# with:
# path: ./bin/{}
# key: "{}-${{hashFiles('pkg/schema/*', 'pkg/utils/*', 'cmd/')}}"
# - name: build {}
# if: steps.cache-{}.outputs.cache-hit != 'true'
# run: make ./bin/{}
# - name: upload {}
# uses: actions/upload-artifact@v3
# with:
# name: {}
# path: ./bin/{}
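      # The commented block above is a copy/paste template: substitute the binary name
      # for each "{}" placeholder (and point hashFiles at the matching cmd/ directory)
      # to wire up an additional binary.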
observe_mariadb:
needs: [build_binaries]
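    # One service container per MariaDB release; host ports 3308-3313 each forward to
    # the server's default port 3306 inside its container, which is presumably how
    # ./bin/observe_mariadb reaches every version on localhost.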
services:
mariadb-10.2.41:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.2.41
ports:
          - 3308:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mariadb-10.3.32:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.3.32
ports:
          - 3309:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mariadb-10.4.22:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.4.22
ports:
          - 3310:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mariadb-10.5.13:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.5.13
ports:
          - 3311:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mariadb-10.6.5:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.6.5
ports:
          - 3312:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mariadb-10.7.1:
env: {MYSQL_ROOT_PASSWORD: password}
image: docker.io/library/mariadb:10.7.1
ports:
- 3313:3306
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/mariadb
- name: pull observe_mariadb binary
uses: actions/download-artifact@v3
with:
name: observe_mariadb
path: ./bin
- run: chmod +x ./bin/*
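      # The observations cache is keyed on a hash of the observer binary itself, so the
      # databases are only re-queried when ./bin/observe_mariadb actually changes.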
- name: cache mariadb observations
uses: actions/cache@v3
id: cache-mariadb-observations
with:
path: ./data/mariadb/observed.sqlite
key: mariadb-observations-${{hashFiles('./bin/observe_mariadb')}}
- name: observe mariadb information_schemata
if: steps.cache-mariadb-observations.outputs.cache-hit != 'true'
        # Previously only 10.7.1 was observed and the others timed out; their services
        # mapped host ports to container port 3036 instead of 3306, which likely
        # explains the timeouts. Left non-fatal just in case.
        run: ./bin/observe_mariadb || true
- name: upload mariadb observations
uses: actions/upload-artifact@v3
with:
name: mariadb_observations
path: ./data/mariadb/observed.sqlite
- name: Dump docker logs on failure
if: failure()
uses: jwalton/gh-docker-logs@v1
observe_mysql:
needs: [build_binaries]
services:
mysql-5.7:
image: docker.io/library/mysql:5.7
ports:
- 3306:3306
env: {MYSQL_ROOT_PASSWORD: password}
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
mysql-8.0:
image: docker.io/library/mysql:8.0
ports:
- 3307:3306
env: {MYSQL_ROOT_PASSWORD: password}
options: --health-cmd="mysqladmin -u root --password=password ping" --health-interval=10s --health-timeout=5s --health-retries=3
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/mysql
- name: pull observe_mysql binary
uses: actions/download-artifact@v3
with:
name: observe_mysql
path: ./bin
- run: chmod +x ./bin/*
- name: cache mysql observations
uses: actions/cache@v3
id: cache-mysql-observations
with:
path: ./data/mysql/observed.sqlite
key: mysql-observations-${{hashFiles('./bin/observe_mysql')}}
- name: observe mysql information_schemata
if: steps.cache-mysql-observations.outputs.cache-hit != 'true'
run: ./bin/observe_mysql
- name: upload mysql observations
uses: actions/upload-artifact@v3
with:
name: mysql_observations
path: ./data/mysql/observed.sqlite
observe_postgres:
needs: [build_binaries]
services:
postgres-10:
image: docker.io/library/postgres:10-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5432:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
postgres-11:
image: docker.io/library/postgres:11-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5433:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
postgres-12:
image: docker.io/library/postgres:12-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5434:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
postgres-13:
image: docker.io/library/postgres:13-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5435:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
postgres-14:
image: docker.io/library/postgres:14-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5436:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
postgres-15:
image: docker.io/library/postgres:15-alpine
env:
POSTGRES_PASSWORD: password
PGPASSWORD: password
ports:
- 5437:5432
options: --health-cmd "pg_isready -U postgres -h 127.0.0.1" --health-interval 1s --health-timeout 10s --health-retries 60
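    # The Postgres services use a 1s-interval / 60-retry health check, giving each
    # container roughly a minute to become ready before it is considered unhealthy.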
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/postgres
- name: pull observe_postgres binary
uses: actions/download-artifact@v3
with:
name: observe_postgres
path: ./bin
- run: chmod +x ./bin/*
- name: cache postgres observations
uses: actions/cache@v3
id: cache-postgres-observations
with:
path: ./data/postgres/observed.sqlite
key: postgres-observations-${{hashFiles('./bin/observe_postgres')}}
- name: observe postgres information_schemata
if: steps.cache-postgres-observations.outputs.cache-hit != 'true'
run: ./bin/observe_postgres
- name: upload postgres observations
uses: actions/upload-artifact@v3
with:
name: postgres_observations
path: ./data/postgres/observed.sqlite
# observe_{}:
# needs: [build_binaries]
# services:
# runs-on: ubuntu-latest
# steps:
# - name: prep fs
# run: mkdir -p ./bin ./data/{}
# - name: pull observe_{} binary
# uses: actions/download-artifact@v3
# with:
# name: observe_{}
# path: ./bin
# - run: chmod +x ./bin/*
# - name: cache {} observations
# uses: actions/cache@v3
# id: cache-{}-observations
# with:
# path: ./data/{}/observed.sqlite
# key: {}-observations-${{hashFiles('./bin/observe_{}')}}
# - name: observe {} information_schemata
# if: steps.cache-{}-observations.outputs.cache-hit != 'true'
# run: ./bin/observe_{}
# - name: upload {} observations
# uses: actions/upload-artifact@v3
# with:
# name: {}_observations
# path: ./data/{}/observed.sqlite
scrape_postgres_docs:
needs: [build_binaries]
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/postgres
- name: pull scrape_postgres_docs binary
uses: actions/download-artifact@v3
with:
name: scrape_postgres_docs
path: ./bin
- run: chmod +x ./bin/*
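      # Two caches: ./.cache keeps previously downloaded documentation pages under a
      # fixed key so they persist across runs (the scraper presumably checks it before
      # hitting the network), while docs.sqlite is keyed on the scraper binary's hash
      # so the docs are only re-extracted when the scraper changes.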
- name: cache postgres doc pages
uses: actions/cache@v3
id: cache-postgres-doc-pages
with:
path: ./.cache
key: postgres-docs-pages
- name: cache postgres docs
uses: actions/cache@v3
id: cache-postgres-docs
with:
path: ./data/postgres/docs.sqlite
key: postgres-docs-${{hashFiles('./bin/scrape_postgres_docs')}}
- name: scrape postgres information_schemata
if: steps.cache-postgres-docs.outputs.cache-hit != 'true'
run: ./bin/scrape_postgres_docs
- name: upload postgres docs
uses: actions/upload-artifact@v3
with:
name: postgres_docs
path: ./data/postgres/docs.sqlite
scrape_mssql_docs:
needs: [build_binaries]
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/mssql
- name: pull scrape_mssql_docs binary
uses: actions/download-artifact@v3
with:
name: scrape_mssql_docs
path: ./bin
- run: chmod +x ./bin/*
- name: cache mssql doc pages
uses: actions/cache@v3
id: cache-mssql-doc-pages
with:
path: ./.cache
key: mssql-docs-pages
- name: cache mssql docs
uses: actions/cache@v3
id: cache-mssql-docs
with:
path: ./data/mssql/docs.sqlite
key: mssql-docs-${{hashFiles('./bin/scrape_mssql_docs')}}
- name: scrape mssql information_schemata
if: steps.cache-mssql-docs.outputs.cache-hit != 'true'
run: ./bin/scrape_mssql_docs
- name: upload mssql docs
uses: actions/upload-artifact@v3
with:
name: mssql_docs
path: ./data/mssql/docs.sqlite
scrape_mariadb_docs:
needs: [build_binaries]
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/mariadb
- name: pull scrape_mariadb_docs binary
uses: actions/download-artifact@v3
with:
name: scrape_mariadb_docs
path: ./bin
- run: chmod +x ./bin/*
- name: cache mariadb doc pages
uses: actions/cache@v3
id: cache-mariadb-doc-pages
with:
path: ./.cache
key: mariadb-docs-pages
- name: cache mariadb docs
uses: actions/cache@v3
id: cache-mariadb-docs
with:
path: ./data/mariadb/docs.sqlite
key: mariadb-docs-${{hashFiles('./bin/scrape_mariadb_docs')}}
- name: scrape mariadb information_schemata
if: steps.cache-mariadb-docs.outputs.cache-hit != 'true'
run: ./bin/scrape_mariadb_docs
- name: upload mariadb docs
uses: actions/upload-artifact@v3
with:
name: mariadb_docs
path: ./data/mariadb/docs.sqlite
scrape_tidb_docs:
needs: [build_binaries]
runs-on: ubuntu-latest
steps:
- name: prep fs
run: mkdir -p ./bin ./data/tidb
- name: pull scrape_tidb_docs binary
uses: actions/download-artifact@v3
with:
name: scrape_tidb_docs
path: ./bin
- run: chmod +x ./bin/*
- name: cache tidb doc pages
uses: actions/cache@v3
id: cache-tidb-doc-pages
with:
path: ./.cache
key: tidb-docs-pages
- name: cache tidb docs
uses: actions/cache@v3
id: cache-tidb-docs
with:
path: ./data/tidb/docs.sqlite
key: tidb-docs-${{hashFiles('./bin/scrape_tidb_docs')}}
- name: scrape tidb information_schemata
if: steps.cache-tidb-docs.outputs.cache-hit != 'true'
run: ./bin/scrape_tidb_docs
- name: upload tidb docs
uses: actions/upload-artifact@v3
with:
name: tidb_docs
path: ./data/tidb/docs.sqlite
# scrape_{}_docs:
# needs: [build_binaries]
# runs-on: ubuntu-latest
# steps:
# - name: prep fs
# run: mkdir -p ./bin ./data/{}
# - name: pull scrape_{}_docs binary
# uses: actions/download-artifact@v3
# with:
# name: scrape_{}_docs
# path: ./bin
# - run: chmod +x ./bin/*
# - name: cache {} doc pages
# uses: actions/cache@v3
# id: cache-{}-doc-pages
# with:
# path: ./.cache
# key: {}-docs-pages
# - name: cache {} docs
# uses: actions/cache@v3
# id: cache-{}-docs
# with:
# path: ./data/{}/docs.sqlite
# key: {}-docs-${{hashFiles('./bin/scrape_{}_docs')}}
# - name: scrape {} information_schemata
# if: steps.cache-{}-docs.outputs.cache-hit != 'true'
# run: ./bin/scrape_{}_docs
# - name: upload {} docs
# uses: actions/upload-artifact@v3
# with:
# name: {}_docs
# path: ./data/{}/docs.sqlite
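  # As with the binary template above, replace each "{}" with the engine name to add a
  # docs-scraping job for another database.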
combine_outputs:
needs:
- observe_postgres
- observe_mariadb
- observe_mysql
- scrape_mariadb_docs
- scrape_postgres_docs
- scrape_mssql_docs
- scrape_tidb_docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
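      # Lint the repository's shell scripts (presumably including ./scripts/merge/dbs.sh
      # and ./scripts/dump_tsv.sh) before they are used to combine the artifacts below.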
- name: shellcheck
run: |
sudo apt update && sudo apt install -y shellcheck
make shellcheck
### merge docs
- name: retrieve scraped postgres docs
uses: actions/download-artifact@v3
with:
name: postgres_docs
path: ./data/postgres
- name: retrieve scraped mssql docs
uses: actions/download-artifact@v3
with:
name: mssql_docs
path: ./data/mssql
- name: retrieve scraped mariadb docs
uses: actions/download-artifact@v3
with:
name: mariadb_docs
path: ./data/mariadb
- name: retrieve scraped tidb docs
uses: actions/download-artifact@v3
with:
name: tidb_docs
path: ./data/tidb
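      # dbs.sh appears to take the merged output database as its first argument followed
      # by the per-engine inputs; the same convention is used for the observation merge
      # and the final docs+observations merge below.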
- name: merge docs
run: |-
./scripts/merge/dbs.sh \
./data/merged.docs.sqlite \
./data/postgres/docs.sqlite \
./data/mssql/docs.sqlite \
./data/mariadb/docs.sqlite \
./data/tidb/docs.sqlite
# TODO: save artifact
### merge observations
- name: retrieve postgres observations
uses: actions/download-artifact@v3
with:
name: postgres_observations
path: ./data/postgres
- name: retrieve mysql observations
uses: actions/download-artifact@v3
with:
name: mysql_observations
path: ./data/mysql
- name: retrieve mariadb observations
uses: actions/download-artifact@v3
with:
name: mariadb_observations
path: ./data/mariadb
- name: merge observations
        run: |-
          ./scripts/merge/dbs.sh \
            ./data/merged.observations.sqlite \
            ./data/mariadb/observed.sqlite \
            ./data/mysql/observed.sqlite \
            ./data/postgres/observed.sqlite
### merge observations + docs => columns.tsv
- name: merge docs+observations
run: ./scripts/merge/dbs.sh ./data/columns.sqlite ./data/merged.observations.sqlite ./data/merged.docs.sqlite
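      # Dump the combined columns.sqlite to ./data/columns.tsv, the generated file that
      # the (currently disabled) commit step below would check in.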
- run: ./scripts/dump_tsv.sh --output=./data/columns.tsv ./data/columns.sqlite
- name: check in ./data/columns.tsv if it changed
run: |
git --no-pager diff || exit 0 # something's up with the output here
# git config user.name "Automated"
# git config user.email "actions@users.noreply.github.com"
# git add ./data/columns.tsv || exit 0
# git commit -m "chore: update $(date '+%Y-%m-%dT%H:%M:%S%z')"
# git push