diff --git a/.github/workflows/check-frontend-urls.yml b/.github/workflows/check-frontend-urls.yml
new file mode 100644
index 00000000..b0bbeabe
--- /dev/null
+++ b/.github/workflows/check-frontend-urls.yml
@@ -0,0 +1,37 @@
+name: Check Refinebio Frontend Urls
+
+# Controls when the action will run. Triggers the workflow on pull request
+# events but only for the master branch
+on:
+  pull_request:
+    branches: [ master ]
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+  # This workflow contains a single job called "check-frontend-urls"
+  check-frontend-urls:
+    runs-on: ubuntu-latest
+    container:
+      image: rocker/tidyverse:4.0.2
+
+    # Steps represent a sequence of tasks that will be executed as part of the job
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Install dependencies
+        run: Rscript -e "install.packages(c('optparse', 'jsonlite'))"
+
+      - name: Run Script to check URLs
+        id: get_missing_urls
+        run: |
+          links=https://raw.githubusercontent.com/AlexsLemonade/refinebio-frontend/davidsmejia/906-examples-links/src/common/examples-links.json
+          results=$(Rscript "scripts/check-frontend-urls.R" --links $links)
+          echo "::set-output name=missing_urls::$results"
+
+      # Fail if the script reported anything other than "OK"
+      - name: Check if any URLs were missing
+        if: ${{ steps.get_missing_urls.outputs.missing_urls != 'OK' }}
+        run: |
+          echo "Please verify that the following file(s) exist:"
+          echo ${{ steps.get_missing_urls.outputs.missing_urls }}
+          exit 1
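
Note on the input: the script below assumes the frontend's examples-links.json is a flat JSON array of URL strings that point into the published refinebio-examples site, and it derives repository-relative paths by splitting each URL on "/refinebio-examples/". The contents of that file are not part of this diff, so the R sketch here uses made-up URLs purely to illustrate the assumed shape and the path derivation:

    # Illustrative only: these URLs are stand-ins, not taken from examples-links.json
    sample_json <- '[
      "https://example.github.io/refinebio-examples/02-microarray/some-example.html",
      "https://example.github.io/refinebio-examples/03-rnaseq/another-example.html#some-section"
    ]'
    expected_urls <- jsonlite::fromJSON(sample_json)

    # everything after "/refinebio-examples/" is kept as a path relative to this repository,
    # e.g. "02-microarray/some-example.html" and "03-rnaseq/another-example.html#some-section"
    expected_files <- stringr::word(expected_urls, 2, sep = "/refinebio-examples/")
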
diff --git a/scripts/check-frontend-urls.R b/scripts/check-frontend-urls.R
new file mode 100644
index 00000000..fec743c8
--- /dev/null
+++ b/scripts/check-frontend-urls.R
@@ -0,0 +1,66 @@
+#!/usr/bin/env Rscript
+#
+# Check refinebio-frontend URLs and print any expected files or anchors that are missing, or "OK" if none are.
+
+library(optparse)
+library(jsonlite)
+
+option_list <- list(
+  make_option(
+    opt_str = c("-l", "--links"), type = "character",
+    default = NULL,
+    help = "JSON array that will be interpreted as the expected urls.",
+    metavar = ".json"
+  )
+)
+
+# Parse options
+opt <- parse_args(OptionParser(option_list = option_list))
+
+expected_urls <- fromJSON(txt = opt$links)
+
+# urls -> relative file paths
+expected_files <- stringr::word(expected_urls, 2, sep = '/refinebio-examples/')
+expected_id_paths <- stringr::str_subset(expected_files, '#')
+# remove anchors
+expected_files <- unique(stringr::word(expected_files, 1, sep = '#'))
+
+# get a list of all html files in the project
+existing_files <- list.files(pattern = 'html$', recursive = TRUE)
+
+# get anything that's not in the project
+missing_files <- expected_files[!expected_files %in% existing_files]
+
+# only check for missing ids in files that are present;
+# if both a file and an id are missing, report the missing file first
+if (length(missing_files) > 0) {
+  expected_id_paths <- expected_id_paths[!grepl(paste(missing_files, collapse = "|"), expected_id_paths)]
+}
+# find missing ids
+missing_ids <- c()
+
+expected_id_parts_list <- strsplit(expected_id_paths, "#")
+for (expected_id_parts in expected_id_parts_list) {
+  expected_file <- expected_id_parts[1]
+  expected_id <- expected_id_parts[2]
+  expected_id_lines <- readr::read_lines(expected_file)
+
+  # check if id="{expected_id}" exists in the rendered html
+  found <- any(stringr::str_detect(expected_id_lines, paste0('id="', expected_id, '"')))
+
+  # add it to missing if not found
+  if (!found) {
+    missing_ids <- c(missing_ids, paste0(expected_id_parts, collapse = "#"))
+  }
+}
+
+
+# group the missing things
+missing <- c(missing_files, missing_ids)
+
+# exit with the list, or "OK" if nothing is missing
+if (length(missing) == 0) {
+  cat('OK')
+} else {
+  cat(paste(missing, collapse = "\n"))
+}
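
The least obvious step in the script is the anchor check: it assumes the rendered example pages encode section anchors as literal id="..." attributes, which is what rmarkdown/pandoc HTML output typically produces. A minimal sketch of that detection on hand-written HTML lines, purely for illustration:

    # Illustrative only: fake html lines standing in for a rendered example page
    html_lines <- c(
      '<div id="create-a-heatmap" class="section level2">',
      '<h2>Create a heatmap</h2>'
    )
    expected_id <- "create-a-heatmap"
    # TRUE if the anchor appears anywhere in the file, mirroring the loop above
    any(stringr::str_detect(html_lines, paste0('id="', expected_id, '"')))

Run from the repository root (as the workflow does), the script can also be exercised locally with a hand-written links file, e.g. Rscript scripts/check-frontend-urls.R --links test-links.json, where test-links.json is a hypothetical local file with the same array shape as examples-links.json; jsonlite::fromJSON accepts a local path as well as a URL.
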