Update with requested PRs #176

name: Update with requested PRs
on:
workflow_dispatch:
schedule: # run Tuesdays and Wednesdays at 12:00 UTC (GitHub cron schedules are in UTC)
- cron: '0 12 * * 2'
- cron: '0 12 * * 3'
jobs:
pr-requests:
runs-on: ubuntu-latest
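# Guard against scheduled runs in forks: only execute in the upstream repository.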
if: github.repository == 'AliceO2Group/dpg-documentation'
steps:
- name: Set CERN timezone
run: |
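# The report script below stamps "last update" with the local time, so pin the runner to CERN time.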
sudo timedatectl set-timezone "Europe/Zurich"
- name: Setup python
run: |
# to convert UTC times to Europe/Zurich (CERN time)
pip install python-dateutil
- name: Checkout documentation
uses: actions/checkout@v4
with:
repository: AliceO2Group/dpg-documentation
path: dpg-documentation
persist-credentials: false
# To push the updated branch, we need a non-shallow clone.
fetch-depth: 0
- name: Create PR branch in docs
working-directory: dpg-documentation
run: |
git config --global user.email 'alibuild@cern.ch'
git config --global user.name 'ALICE Action Bot'
# Overwrite branch, creating a new one based on HEAD
git checkout -B auto-update-requests
- name: Get PRs from AliceO2
id: get_prs_o2
uses: octokit/graphql-action@v2.x
env:
GITHUB_TOKEN: ${{ github.token }}
with:
query: |
query {
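# The documentation repository's GraphQL node ID is fetched here so the final
# "Open PR" step can pass it to the createPullRequest mutation.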
doc: repository(owner: "AliceO2Group", name: "dpg-documentation") {
id
}
AliceO2: repository(owner: "AliceO2Group", name: "AliceO2") {
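# The 100 most recently updated open/merged PRs; labels, author, merge commit
# and the first two commits are all the report tables need.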
pullRequests(first: 100, states: [MERGED, OPEN], orderBy: { field: UPDATED_AT, direction: DESC }) {
nodes {
title
createdAt
updatedAt
mergedAt
number
labels(first:10) {
edges{
node {
name
}
}
}
state
url
author {
login
}
mergeCommit {
oid
}
commits(first: 2) {
nodes {
commit {
oid
}
}
}
}
}
}
}
- name: Get PRs from O2DPG
id: get_prs_o2dpg
uses: octokit/graphql-action@v2.x
env:
GITHUB_TOKEN: ${{ github.token }}
with:
query: |
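# Same PR fields as the AliceO2 query above; the QualityControl and O2Physics
# queries below follow the same pattern.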
query {
O2DPG: repository(owner: "AliceO2Group", name: "O2DPG") {
pullRequests(first: 100, states: [MERGED, OPEN], orderBy: { field: UPDATED_AT, direction: DESC }) {
nodes {
title
createdAt
updatedAt
mergedAt
number
labels(first:10) {
edges{
node {
name
}
}
}
state
url
author {
login
}
mergeCommit {
oid
}
commits(first: 2) {
nodes {
commit {
oid
}
}
}
}
}
}
}
- name: Get PRs from QC
id: get_prs_qc
uses: octokit/graphql-action@v2.x
env:
GITHUB_TOKEN: ${{ github.token }}
with:
query: |
query {
QualityControl: repository(owner: "AliceO2Group", name: "QualityControl") {
pullRequests(first: 100, states: [MERGED, OPEN], orderBy: { field: UPDATED_AT, direction: DESC }) {
nodes {
title
createdAt
updatedAt
mergedAt
number
labels(first:10) {
edges{
node {
name
}
}
}
state
url
author {
login
}
mergeCommit {
oid
}
commits(first: 2) {
nodes {
commit {
oid
}
}
}
}
}
}
}
- name: Get PRs from O2Physics
id: get_prs_o2physics
uses: octokit/graphql-action@v2.x
env:
GITHUB_TOKEN: ${{ github.token }}
with:
query: |
query {
O2Physics: repository(owner: "AliceO2Group", name: "O2Physics") {
pullRequests(first: 100, states: [MERGED, OPEN], orderBy: { field: UPDATED_AT, direction: DESC }) {
nodes {
title
createdAt
updatedAt
mergedAt
number
labels(first:10) {
edges{
node {
name
}
}
}
state
url
author {
login
}
mergeCommit {
oid
}
commits(first: 2) {
nodes {
commit {
oid
}
}
}
}
}
}
}
- name: Combine and store PRs
id: combine_prs
run: |
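# Each ${{ ... }} expression is expanded by the runner before the shell runs;
# the quoted heredoc delimiter (\EOF) keeps the shell from touching the JSON itself.
# jq -s then deep-merges the four objects into prs.json
# (top-level keys: doc, AliceO2, O2DPG, QualityControl, O2Physics).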
cat <<\EOF > prs_o2.json
${{ steps.get_prs_o2.outputs.data }}
EOF
cat <<\EOF > prs_o2dpg.json
${{ steps.get_prs_o2dpg.outputs.data }}
EOF
cat <<\EOF > prs_qc.json
${{ steps.get_prs_qc.outputs.data }}
EOF
cat <<\EOF > prs_o2physics.json
${{ steps.get_prs_o2physics.outputs.data }}
EOF
jq -s '.[0] * .[1] * .[2] * .[3]' prs_o2.json prs_o2dpg.json prs_qc.json prs_o2physics.json > prs.json
- name: Make the request pages
shell: python
run: |
import json
from time import time
from datetime import datetime
from dateutil import tz, parser
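# Flow: load the merged prs.json, group PRs by state and async-* label,
# then write one Markdown report page per state.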
def sort_prs(prs):
# Group PRs by state (MERGED or OPEN), then by async-* label, then by repository.
# Sorting by mergedAt/updatedAt happens later in make_report.
prs_dict = {}
repo_pr_numbers = []
for pr in prs:
repo_name = pr['repo_name']
repo_name_number = (repo_name, pr['number'])
if repo_name_number in repo_pr_numbers:
continue
repo_pr_numbers.append(repo_name_number)
flag = pr['state']
if flag not in prs_dict:
prs_dict[flag] = {}
label_edges = pr.get('labels', {}).get('edges', None)
if not label_edges:
continue
label_names = []
for edge in label_edges:
label_name = edge.get('node', {}).get('name', None)
if not label_name:
continue
if not label_name.startswith('async-'):
continue
label_names.append(label_name)
for label_name in label_names:
if label_name not in prs_dict[flag]:
prs_dict[flag][label_name] = {}
if repo_name not in prs_dict[flag][label_name]:
prs_dict[flag][label_name][repo_name] = []
prs_dict[flag][label_name][repo_name].append(pr)
return prs_dict
def make_report(flag, prs_per_label, outfile):
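# Write one Markdown page for the given state: one table per async-* label,
# with rows sorted from oldest to most recent within each repository.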
# prepare to convert time to CERN/Geneva time correctly
to_zone = tz.gettz('Europe/Zurich')
with open(outfile, 'w') as f:
f.write(f'# List PRs in state {flag} (from oldest to recent per package), last update: {datetime.fromtimestamp(int(time())).strftime("%Y-%m-%d %H:%M")} \n')
for label_name, prs_per_repo in prs_per_label.items():
f.write(f'\n\n### For label {label_name}\n\n')
if flag == 'MERGED':
sort_by_field = 'mergedAt'
common_header = '| Author | Package | PR | PR title | Merged at | #original commits | Merge commit |\n| --- | --- | --- | --- | --- | --- | --- |\n'
else:
sort_by_field = 'updatedAt'
common_header = '| Author | Package | PR | PR title | Updated at | #original commits | Merge commit |\n| --- | --- | --- | --- | --- | --- | --- |\n'
f.write(common_header)
for repo_name, prs in prs_per_repo.items():
sort_by = [pr[sort_by_field] for pr in prs]
for time_iso_utc, pr in sorted(zip(sort_by, prs), key=lambda pair: pair[0]):  # sort on the timestamp only; PR dicts are not comparable
time_geneva = parser.parse(time_iso_utc)
time_geneva = time_geneva.astimezone(to_zone).strftime('%Y-%m-%d %H:%M:%S')
# the query fetches at most 2 commits, so make clear when there could be more
n_commits = len(pr["commits"]["nodes"])
if n_commits >= 2:
n_commits = f'at least {n_commits}'
f.write(f'| {pr["author"]["login"]} | {repo_name} | [PR]({pr["url"]}) | {pr["title"]} | {time_geneva} | {n_commits} | {pr["mergeCommit"]["oid"] if pr["mergeCommit"] else "-"} |\n')
def make_markdowns(prs_json):
# a list of all PRs
prs = []
with open(prs_json, 'r') as f:
prs_github = json.load(f)
for repo_name, prs_repo in prs_github.items():
if repo_name not in ('AliceO2', 'O2DPG', 'QualityControl', 'O2Physics'):
continue
for pr in prs_repo.get('pullRequests', {}).get('nodes', []):
# add the repo name manually; it is already known from the aliases in the initial queries, so there is no need to fetch it from GitHub as well
pr['repo_name'] = repo_name
prs.append(pr)
# sort before further processing (by state, label and merge/update timestamp)
prs = sort_prs(prs)
for flag, prs_per_label in prs.items():
make_report(flag, prs_per_label, f'dpg_pr_report_{flag}.md')
make_markdowns('prs.json')
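# This produces dpg_pr_report_MERGED.md and dpg_pr_report_OPEN.md, which the next step copies into the docs tree.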
- name: Add and push changes, get repo ID
id: add_push_repo_id
env:
GITHUB_TOKEN: ${{ secrets.ALIBUILD_GITHUB_TOKEN }}
working-directory: dpg-documentation/
run: |
cp ../*.md docs/software/requests_automatic
echo "NEED_PR=true" >> $GITHUB_OUTPUT
if [ -z "$(git status --porcelain)" ]; then  # also catches newly created, untracked report files
echo "No changes, nothing to push."
echo "NEED_PR=false" >> $GITHUB_OUTPUT
exit 0 # Nothing has changed, so no need to send a PR.
fi
# Add the generated markdown reports
git add docs/software/requests_automatic/*.md
git commit -m 'Automatic update of PR requests'
git remote set-url origin "https://alibuild:$GITHUB_TOKEN@github.com/AliceO2Group/dpg-documentation"
git push -f origin auto-update-requests
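# The documentation repo's GraphQL node ID was fetched by the first query; the createPullRequest mutation below needs it.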
repo_id=$(jq -r '.doc.id' ../prs.json)
echo "REPO_ID=${repo_id}" >> $GITHUB_OUTPUT
- name: Open PR
id: open_pr
if: ${{ steps.add_push_repo_id.outputs.NEED_PR == 'true' }}
uses: octokit/graphql-action@v2.x
env:
GITHUB_TOKEN: ${{ secrets.ALIBUILD_GITHUB_TOKEN }}
repo_id: ${{ github.repository_id }}
with:
query: |
mutation CreatePullRequest($repoId: ID!) {
createPullRequest(input: {
repositoryId: $repoId,
baseRefName: "main",
headRefName: "auto-update-requests",
title: "Update requests",
body: ""
}) {
pullRequest {
number
url
}
}
}
variables: |
{
"repoId": "${{ steps.add_push_repo_id.outputs.REPO_ID }}"
}