HPCC-XXXX Add workflow test to check if default helm output changes #2
# Smoketest github action
# =======================
#
# Uses cached instances of previous builds to perform partial (and therefore quicker) incremental builds.
# Once the platform is built and installed (make install),
# it runs the regression suite setup stage within the same github job ('build-and-setup').
#
# On success, an artifact is built that contains the install binaries and
# the HPCC binaries that have accumulated from the setup stage.
#
# Once the 'build-and-setup' job is complete, the dependent regression suite jobs and unittest job are launched in parallel.
# The regression suite queries are manually sub-divided into chunks (alphabetically) and run via a job matrix for parallelism.
# If anything fails, all jobs are aborted (fail-fast=true), and the logs are captured into a published artifact.
#
# NOTES:
# + pre-requisite build dependencies (and runtime dependencies) are listed manually, and must be kept up to date with the
#   requirements of the platform. MORE: a list of required build dependencies could be kept within the platform source and picked up
# + 'cacheversion' is purely in case it is necessary to force a cache-miss, i.e. all cached items are based on this version
# + Caching is via github's actions/cache, and is limited to 5GB total per repository; the oldest entries are evicted first, as are entries unused for >7 days
#   cached builds are tagged with:
#   1) base_ref + SHA (exact match, e.g. hpccbuild-1-7.12.10-6c981c48ae6e35b62d86d8e59e42799c5cd14812)
#   2) base_ref (branch match, e.g. hpccbuild-1-7.12.10)
#   3) base_ref major.minor only (e.g. hpccbuild-1-7.12.)
#   4) base_ref major only (e.g. hpccbuild-1-7.)
#   5) generic cacheversion only (e.g. hpccbuild-1-)
#   The cache restore first tries an exact key match (only true when re-running a PR with the same SHA),
#   and then falls back to trying to find a match for 2-5, in that order.
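#   For illustration only (branch name hypothetical): with this file's cacheversion=3 and a PR targeting
#   'candidate-9.2.x', the exact key would be 'hpccbuild-3-candidate-9.2.x-<full-SHA>', with restore keys
#   'hpccbuild-3-candidate-9.2.x', 'hpccbuild-3-candidate-9.2.', 'hpccbuild-3-candidate-9.' and 'hpccbuild-3-'.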
name: smoketest
env:
  cacheversion: 3
  VCPKG_BINARY_SOURCES: "clear;nuget,GitHub,read"
  OS_DEPENDENCIES: "bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk
    r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake"
on:
  pull_request:
    branches:
      - "master"
      - "newbuild"
      - "candidate-*"
      - "!candidate-7.6.*"
      - "!candidate-7.4.*"
      - "!candidate-7.2.*"
      - "!candidate-7.0.*"
      - "!candidate-6.*"
# NB: this fails to cancel in-flight actions, with error 'Resource not accessible by integration',
# but it's non-fatal. Appears to be dependent on what permissions the PR owner has, i.e. the github token used
# does not have permission to cancel actions.
jobs:
  check-skip:
    # continue-on-error: true # Uncomment once integration is finished
    runs-on: ubuntu-22.04
    # Map a step output to a job output
    outputs:
      platform: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.platform }}
      eclwatch: ${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') || steps.skip_check.outputs.eclwatch }}
    steps:
      - id: skip_check
        uses: hpcc-systems/github-actions/changed-modules@main
        with:
          github_token: ${{ github.token }}
  build-and-setup:
    name: Build platform and regress setup
    needs: check-skip
    if: ${{ needs.check-skip.outputs.platform || needs.check-skip.outputs.eclwatch }}
    runs-on: ubuntu-22.04
    timeout-minutes: 150 # the build step has its own timeout, however the job can sometimes take time to download dependencies
    outputs:
      cache-state: ${{ steps.check-cache.outputs.state }}
    steps:
      - name: Setup and git clone
        id: vars
        run: |
          if [[ "${{ github.event_name }}" = 'pull_request' ]]
          then
            echo ::set-output name=branch::${{ github.head_ref }}
            echo ::set-output name=base_ref::${{ github.base_ref }}
            versioned=$(echo ${{ github.base_ref }} | sed -E -n 's/^(.*)-([0-9]+)\.([0-9]+)\.([0-9]+|x)(-[0-9]+|).*$/\1 \2 \3 \4 \5/p')
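            # Illustrative example (not an actual branch): a base_ref of 'candidate-7.12.10' yields
            # 'candidate 7 12 10 ', i.e. prefix=candidate, major=7, minor=12, point=10, cand=''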
if [[ "${versioned}" ]] | |
then | |
echo "matched version = ${versioned}" | |
IFS=' ' read prefix major minor point cand <<< ${versioned} | |
echo "name=base-majorminor-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}.${minor}." | |
echo "name=base-majoronly-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}." | |
echo ::set-output name=base-majorminor-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}.${minor}. | |
echo ::set-output name=base-majoronly-restore-key::hpccbuild-${{ env.cacheversion }}-${prefix}-${major}. | |
fi | |
else | |
echo ::set-output name=branch::${GITHUB_REF#refs/heads/} | |
echo ::set-output name=base_ref::${GITHUB_REF#refs/heads/} | |
fi | |
if [[ "${{ github.event.pull_request.head.repo.owner.login }}" = "${{ github.repository_owner }}" ]] | |
then | |
repository=${{ github.repository }} | |
else | |
repository=$(echo -n ${{ github.event.pull_request.head.repo.owner.login }}/ ; echo -n ${{ github.repository }} | sed -E -n 's@^[^/]+/(.+)$@\1@p') | |
fi | |
echo ::set-output name=repository::${repository} | |
echo "Cloning https://github.com/${repository}" | |
git clone -n https://github.com/${repository} src | |
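          # NB: '-n' clones without checking out a working tree; the required SHA is checked out
          # later, once the cache state is known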
if [[ "${{ github.event_name }}" = 'pull_request' ]] | |
then | |
cd src | |
git remote add upstream https://github.com/${{ github.repository }} | |
git fetch upstream +${{ github.ref }} | |
fetch_sha=$(git rev-parse --verify FETCH_HEAD) | |
git fetch upstream ${{ github.base_ref}} | |
base_ref_sha=$(git rev-parse upstream/${{ github.base_ref }}) | |
echo ::set-output name=base_ref_sha::${base_ref_sha} | |
echo ::set-output name=sha::${fetch_sha} | |
echo "base_ref_sha = ${base_ref_sha}" | |
echo "fetch_sha = ${fetch_sha}" | |
else | |
echo ::set-output name=sha::${{ github.sha }} | |
echo ::set-output name=base_ref_sha::${{ github.sha }} | |
fi | |
      # NB: restore-keys set to e.g. hpccbuild-2-7.12.10, hpccbuild-2-7.12, hpccbuild-2-7, hpccbuild-2-
      - name: Fetch cache
        id: cache
        uses: actions/cache@v2
        with:
          path: |
            build-cache
            merge-patch
            src-cache
          key: hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }}-${{ steps.vars.outputs.sha }}
          restore-keys: |
            hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }}
            ${{ steps.vars.outputs.base-majorminor-restore-key }}
            ${{ steps.vars.outputs.base-majoronly-restore-key }}
            hpccbuild-${{ env.cacheversion }}-
      - name: tracing
        run: |
          echo "Branch = ${{ steps.vars.outputs.branch }}"
          echo "Base ref = ${{ steps.vars.outputs.base_ref }}"
          echo "SHA = ${{ steps.vars.outputs.sha }}"
          echo "Action = ${{ github.action }}"
          echo "Event = ${{ github.event_name }}"
          echo "Actor = ${{ github.actor }}"
          echo "Ref = ${{ github.ref }}"
          echo "Sha = ${{ github.sha }}"
          echo "github.repository = ${{ github.repository }}"
          echo "repository = ${{ steps.vars.outputs.repository }}"
          echo "repository_owner = ${{ github.repository_owner }}"
          echo "github.workspace = ${{ github.workspace }}"
          echo "runner.workspace = ${{ runner.workspace }}"
          echo "github.event.pull_request.head.repo.owner.login = ${{ github.event.pull_request.head.repo.owner.login }}"
          if [[ "${{ github.event_name }}" = 'pull_request' ]]
          then
            echo "PR base_ref = ${{ github.base_ref }}"
            echo "PR head_ref = ${{ github.head_ref }}"
            echo "PR base SHA = ${{ github.event.pull_request.base.sha }}"
          fi
          echo "restore-key1: hpccbuild-${{ env.cacheversion }}-${{ steps.vars.outputs.base_ref }}"
          echo "restore-key2: ${{ steps.vars.outputs.base-majorminor-restore-key }}"
          echo "restore-key3: ${{ steps.vars.outputs.base-majoronly-restore-key }}"
          echo "restore-key4: hpccbuild-${{ env.cacheversion }}-"
      # NB: actions/cache does not set cache-hit to true if restore-keys used.
      # So here we:
      # 1) check if a cache was restored
      # 2) check that the SHA stored in the cache can be found in git
      # 3) mv cache into place (build-cache->build)
      # 4) checkout source @ base_ref
      # 5) get submodules
      # 6) apply git merge-patch from cache, bringing source up-to-date with the source used in the cached binary build
      # 7) copy src-cache files into place
      # 8) touch all files so they appear 'old' (timestamped before the original make run), i.e. older than the objects in the cache
      # 9) checkout pr branch - ensuring only pr files that differ are updated (and hence will rebuild)
      # 10) list updated files (for tracing only)
      # 11) set 'state' output variable to 'miss' if not an absolute match,
      #     or to 'hit' if the cache is usable, i.e. if set at all, run build and tests.
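      # Example outcomes (illustrative): an exact key hit with no 'smoketest-force-rerun' label leaves
      # 'state' unset (nothing to rebuild or test); a restore-key hit yields state=hit (incremental build);
      # no usable cache yields state=miss (full build).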
      - name: Check cache
        id: check-cache
        if: ${{ (steps.cache.outputs.cache-hit != 'true') || contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') }}
        run: |
          if [[ ("${{ steps.cache.outputs.cache-hit }}" == 'true') && ("${{ contains(github.event.pull_request.labels.*.name, 'smoketest-force-rerun') }}" == 'true') ]]
          then
            echo "Exact cache match, but 'smoketest-force-rerun' label forced rerun"
          fi
          echo ::set-output name=state::miss
          if [[ "${{ contains(github.event.pull_request.labels.*.name, 'smoketest-ignore-cache') }}" == 'true' ]]
          then
            echo "Cache found, but ignored because 'smoketest-ignore-cache' label set"
            rm -rf build-cache merge-patch src-cache
          else
            if [[ -d build-cache ]]
            then
              echo "Attempting to use existing cache"
              stat build-cache merge-patch src-cache
              du -sh build-cache merge-patch src-cache
              cd src
              if [[ "commit" == "$(git cat-file -t $(cat ../build-cache/cache-sha))" ]]
              then
                mv ../build-cache ../build
                echo "Checking out cache SHA (cached github.base_ref SHA): $(cat ../build/cache-sha)"
                git checkout $(cat ../build/cache-sha)
                git submodule update --init --recursive --jobs 4
                echo "Applying merge patch"
                git apply ../merge-patch
                # Add all mods that came from merge-patch into a commit,
                # so that the final git checkout will not clash with working tree files
                git add -A
                git -c user.name='Smoketest' -c user.email='smoke@test.com' commit -m 'merge-patch'
                cd ../src-cache
                echo "Cached source tree files:"
                find . -type f
                echo "================"
                find . -type f | cpio -p -dum ../src
                cd ../src
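                # Backdate every source file to the cached build's timestamp, so the following checkout
                # of the PR SHA leaves only genuinely changed files newer than the cached objects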
                find . -type f -exec touch -r ../build/cache-timestamp {} +
                git checkout ${{ steps.vars.outputs.sha }}
                git submodule update --init --recursive --jobs 4
                echo "Changed files (from SHA: $(cat ../build/cache-sha)):"
                find -name '.git*' -prune -o -newer ../build/cache-timestamp -type f -print
                echo ::set-output name=state::hit
              else
                echo "Cached SHA $(cat ../build-cache/cache-sha) could not be found. Skipping cache."
              fi
            fi
          fi
      - name: Dependencies22.04
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed
        run: |
          sudo apt-get update
          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
          sudo apt-get install -y xmlstarlet
          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
      - name: Cache miss prep
        if: ${{ steps.check-cache.outputs.state == 'miss' }}
        run: |
          rm -rf build build-cache merge-patch src-cache
          mkdir build
          mkdir src-cache
          cd src
          git checkout ${{ steps.vars.outputs.sha }}
          git submodule update --init --recursive --jobs 4
      - name: "Setup NuGet credentials"
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed
        run: |
          mono `vcpkg fetch nuget | tail -n 1` \
            sources add \
            -source "https://nuget.pkg.github.com/hpcc-systems/index.json" \
            -storepasswordincleartext \
            -name "GitHub" \
            -username "${{ github.actor }}" \
            -password "${{ secrets.GITHUB_TOKEN }}"
      - name: "Remove builtin vcpkg"
        if: ${{ github.event_name == matrix.event_name && needs.check-skip.outputs.platform }}
        shell: "bash"
        run: |
          sudo rm -rf "$VCPKG_INSTALLATION_ROOT"
      - name: "vcpkg Bootstrap"
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set then step needed
        working-directory: ./src/vcpkg
        run: |
          ./bootstrap-vcpkg.sh
      # Subsequent cache hit builds use cache-timestamp to ensure all sources are marked 'old',
      # except those changed between the cache SHA and the SHA being built.
      # NB: BUILD_TAG is overridden to the constant "smoketest" to prevent the auto-generated build tag being
      # used, which would change per PR (per SHA) and, because it's in a common header, cause a lot of undesirable rebuilding.
      - name: Build
        timeout-minutes: 120 # Generous, typical build time from clean is < 60
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set to anything, then step needed
        run: |
          cd build
          echo "removing CMakeCache.txt"
          rm -f CMakeCache.txt
          if [[ ! -f cache-timestamp ]]; then touch cache-timestamp; fi
          touch ../cmake-timestamp
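          # 'vcpkg fetch cmake' prints the path of the cmake binary it fetched on its last output line;
          # the backticks then invoke that cmake to configure the build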
          `../src/vcpkg/vcpkg fetch cmake | tail -n 1` ../src -Wno-dev -DRUNTIME_USER=$(whoami) -DRUNTIME_GROUP=$(id -gn) -DDESTDIR=$(realpath ..)/install -DINCLUDE_PLUGINS=1 -DCMAKE_BUILD_TYPE=Release -DUSE_PYTHON2=0 -DUSE_PYTHON3=1 -DSUPPRESS_SPARK=1 -DUSE_CPPUNIT=1 -DUSE_CASSANDRA=Off -DCMAKE_CXX_FLAGS="-DBUILD_TAG=\\\"dummytag\\\" -DBUILD_VERSION_MAJOR=1 -DBUILD_VERSION_MINOR=1 -DBUILD_VERSION_POINT=0" -DBUILD_TAG="smoketest" -DSTRIP_RELEASE_SYMBOLS=0 -DCMAKE_CXX_FLAGS="-DTRACE_GLOBAL_GROUP=1"
          make -j$(nproc) install
      - name: build-logs-artifact
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: build-logs
          path: |
            build/CMakeCache.txt
            build/CMakeFiles/CMakeOutput.log
            build/CMakeFiles/CMakeError.log
      # Cache contains:
      # - make binaries (build-cache)
      # - any sources written within the source dir since the build was configured, i.e. newer than cmake-timestamp (src-cache) (e.g. AWS writes a config header)
      # - cache-timestamp, timestamp of cache-miss build. All files except diffs set to this time, ensuring no existing binaries are rebuilt
      # - cache-sha, the SHA of the base_ref used to build. On a cache hit, used to find changed files and ensure they are the only ones rebuilt.
      # - merge-patch, the diffs between the base_ref and the PR commit at the time of the PR/cache build.
      - name: Prepare cache
        if: ${{ steps.check-cache.outputs.state != '' }} # if 'state' set to anything, then step needed
        run: |
          echo ${{ steps.vars.outputs.base_ref_sha }} > build/cache-sha
          mv build build-cache
          cd src
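          # Capture any files the build wrote into the source tree (newer than cmake-timestamp),
          # so a later cache-hit build can restore them alongside the cached objects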
          find -name '.git*' -prune -o -newer ../cmake-timestamp -type f -print | cpio -p -dum ../src-cache
          echo "Merge patch files:"
          git diff --name-only ${{ steps.vars.outputs.base_ref_sha }} ${{ steps.vars.outputs.sha }}
          git diff --binary ${{ steps.vars.outputs.base_ref_sha }} ${{ steps.vars.outputs.sha }} > ../merge-patch
      # - alter stock ecl-test.json, to point to install dir (using jq)
      # - create a clean copy of the install directory (for later reuse)
      # - rm hpcc-data from copy (the post-setup hpcc-data will be moved in when done)
      - name: Prepare install artifact
        if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }}
        run: |
          # configure ecl-test.json with correct paths
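          # jq rewrites the stock config so the drop zone, setup and regression paths all point under this
          # workspace's ./install and ./regress directories, and limits maxAttemptCount to 1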
          jq --arg rootdir "${{ github.workspace }}" \
            '.Regress.dropzonePath = $rootdir + "/install" + .Regress.dropzonePath | .Regress.setupExtraParams.OriginalTextFilesOsPath = $rootdir + "/install" + .Regress.setupExtraParams.OriginalTextFilesOsPath | .Regress.setupExtraDParams.HPCCBaseDir = $rootdir + "/install" + .Regress.setupExtraDParams.HPCCBaseDir | .Regress.regressionDir = $rootdir + "/regress" | .Regress.maxAttemptCount = "1" | .Regress.logDir = $rootdir + "/regress/log"' \
            install/opt/HPCCSystems/testing/regress/ecl-test.json > ecl-test.json
          mv -f ecl-test.json install/opt/HPCCSystems/testing/regress/ecl-test.json
          # configure environment.xml with slavesPerNode=2, channelsPerSlave=1
          xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 install/etc/HPCCSystems/environment.xml
          cp ${{ github.workspace }}/src/.github/workflows/timeoutcmd install/opt/HPCCSystems/bin/
          cp ${{ github.workspace }}/src/.github/workflows/smoketest-preabort.sh install/opt/HPCCSystems/bin/
          mkdir copy
          cp -rp install copy/
          rm -rf copy/install/var/lib/HPCCSystems/hpcc-data
          rm -rf copy/install/var/lib/HPCCSystems/queries
      - name: Run regression suite setup
        if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }}
        timeout-minutes: 10
        run: |
          export LANG="en_US.UTF-8"
          sudo update-locale
          source install/opt/HPCCSystems/sbin/hpcc_setenv
          install/opt/HPCCSystems/etc/init.d/hpcc-init start
          mkdir -p regress
          cd install/opt/HPCCSystems/testing/regress
          # force regression suite to timeout after 8 minutes, so it captures ZAP report of any inflight hung queries
          timeoutcmd $((8 * 60)) \
            ./ecl-test setup --preAbort 'smoketest-preabort.sh' --pq 2 --generateStackTrace
          grep Failure: ${{ github.workspace }}/regress/log/setup_*.log
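          # Sum the 'Failure: N' counts across all setup logs; any non-zero total fails the step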
if [[ "$(grep -ohP '(?<=^Failure: )[0-9]+$' ${{ github.workspace }}/regress/log/setup_*.log | awk '{s+=$1} END {print s}')" -gt 0 ]]; then exit 1; fi | |
${{ github.workspace }}/install/opt/HPCCSystems/etc/init.d/hpcc-init stop | |
- name: regression-setup-logs-artifact | |
if: ${{ failure() || cancelled() }} | |
uses: actions/upload-artifact@v2 | |
with: | |
name: regress-setup-logs | |
path: | | |
install/var/log/HPCCSystems | |
regress/ | |
# - mv regression suite setup created data from hpcc-data and queries into copy of install | |
# - create tarball of install for artifact uploading | |
- name: Finalize install artifact | |
if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }} | |
run: | | |
mv install/var/lib/HPCCSystems/hpcc-data copy/install/var/lib/HPCCSystems/ | |
mv install/var/lib/HPCCSystems/queries copy/install/var/lib/HPCCSystems/ | |
[[ -d ${{ github.workspace }}/src/esp/src/test-ui ]] && cp -r ${{ github.workspace }}/src/esp/src/test-ui copy/install/ | |
cd copy | |
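          # NB: compressed with zstd (not gzip) despite the .tgz name; downstream jobs
          # extract it with 'tar --zstd -xf' accordingly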
          tar --zstd -cf ../install.tgz install
      - name: Upload install artifact
        if: ${{ steps.check-cache.outputs.state != '' && github.event_name == 'pull_request' }}
        uses: actions/upload-artifact@v2
        with:
          name: installsetup-artifact
          path: |
            install.tgz
  # Matrix of jobs run in parallel once build+setup above completes successfully.
  # All use the post-setup install.tgz artifact, which contains binaries and setup state.
  # Break each engine run into sets for speed.
  # NB: each regression suite job runs these steps:
  # - installs dependencies (probably too many for runtime)
  # - starts HPCC
  # - runs the regression suite with params { engine, match[pattern,exclude] }
  # TODO: needs to process results and capture a report, to be assembled by the workflow when all jobs are done
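  # i.e. currently 3 engines x 6 query sets = 18 jobs, plus 3 extra teststdlibrary.ecl jobs added via 'include'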
  regression-jobs:
    needs: build-and-setup
    if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.platform }}
    timeout-minutes: 60 # each matrix step has its own timeout, however the job can sometimes take time to download dependencies
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: true
      matrix:
        engine: ['hthor', 'thor', 'roxie']
        match:
          - pattern: '[a-d]*.ecl'
          - pattern: '[e-h]*.ecl'
          - pattern: '[i-l]*.ecl'
          - pattern: '[m-q]*.ecl'
          - pattern: '[r-u]*.ecl'
            exclude: teststdlibrary.ecl
          - pattern: '[v-z]*.ecl'
        include:
          - engine: 'hthor'
            match:
              pattern: teststdlibrary.ecl
          - engine: 'thor'
            match:
              pattern: teststdlibrary.ecl
          - engine: 'roxie'
            match:
              pattern: teststdlibrary.ecl
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: installsetup-artifact
      - name: vars
        id: vars
        run: |
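          # tr maps every non-alphanumeric character in the pattern to '_', giving a name that is safe
          # to use in the uploaded artifact's name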
          echo ::set-output name=matrix-setname::$(echo -n ${{ matrix.match.pattern }} | tr -c "[:alnum:]" _)
      - name: Prerequisites
        run: |
          sudo apt-get update
          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
          sudo apt-get install -y gdb
          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
      - name: run
        timeout-minutes: 30 # generous, each matrix step shouldn't take more than 10 mins
        run: |
          export LANG="en_US.UTF-8"
          sudo update-locale
          tar --zstd -xf install.tgz
          source install/opt/HPCCSystems/sbin/hpcc_setenv
          install/opt/HPCCSystems/etc/init.d/hpcc-init start
          cd install/opt/HPCCSystems/testing/regress
          (cd ecl; ls ${{ matrix.match.pattern }}) > matches.tmp
          echo queries are:
          cat matches.tmp
          if [[ -z "${{ matrix.match.exclude }}" ]]
          then
            queries="$(cat matches.tmp | tr '\n' ' ')"
          else
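            # list the excluded glob, then use it (via 'grep -v -f -') to filter those names out of matches.tmp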
queries="$(cd ecl; ls ${{ matrix.match.exclude }} | grep -v -f - ../matches.tmp | tr '\n' ' ')" | |
fi | |
echo queries after exclude: ${queries} | |
# force regression suite to timeout after 28 minutes, so it captures ZAP report of any inflight hung queries | |
timeoutcmd $((28 * 60)) \ | |
./ecl-test query --preAbort 'smoketest-preabort.sh' --pq 2 --target ${{ matrix.engine }} --excludeclass python2,embedded-r,embedded-js,3rdpartyservice,mongodb --generateStackTrace ${queries} | |
grep Failure: ${{ github.workspace }}/regress/log/${{ matrix.engine }}.*.log | |
if [[ "$(grep -oP '(?<=^Failure: )[0-9]+$' ${{ github.workspace }}/regress/log/${{ matrix.engine }}.*.log)" -gt 0 ]]; then exit 1; fi | |
- name: regression-run-logs-artifact | |
if: ${{ failure() || cancelled() }} | |
uses: actions/upload-artifact@v2 | |
with: | |
name: regression-run-logs-${{ matrix.engine }}-${{ steps.vars.outputs.matrix-setname }} | |
path: | | |
install/var/log/HPCCSystems | |
regress/ | |
if-no-files-found: error | |
# NB: this doesn't really need the post-setup data files included in install.tgz | |
# but as this is relatively quick and in parallel with others, it probably doesn't matter | |
  unittests:
    needs: build-and-setup
    if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' }}
    timeout-minutes: 30 # the unittests run step has its own timeout, however the job can sometimes take time to download dependencies
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: installsetup-artifact
      - name: Prerequisites
        run: |
          sudo apt-get update
          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
          sudo apt-get install -y gdb
          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
      - name: run
        timeout-minutes: 10 # generous, expected time is approx 1 min.
        run: |
          export LANG="en_US.UTF-8"
          sudo update-locale
          tar --zstd -xf install.tgz
          install/opt/HPCCSystems/bin/unittests
  # NB: this doesn't really need the post-setup data files included in install.tgz
  # but as this is relatively quick and in parallel with others, it probably doesn't matter
  eclwatch-ui-tests:
    needs: build-and-setup
    if: ${{ needs.build-and-setup.outputs.cache-state != '' && github.event_name == 'pull_request' && needs.check-skip.outputs.eclwatch }}
    timeout-minutes: 30 # the ui-tests run step has its own timeout, however the job can sometimes take time to download dependencies
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/download-artifact@v2
        with:
          name: installsetup-artifact
      - name: Check-ECLWatch-UI-Test-Directory
        id: check
        run: |
          tar --zstd -xf install.tgz
          if [[ ! -d install/test-ui/tests ]]
          then
            echo "ECLWatch UI test-ui/tests directory missing."
          else
            javaFilesCount=$(find install/test-ui/tests/ -iname '*.java' -type f -print | wc -l )
            echo "Number of test java files is $javaFilesCount"
            if [[ ${javaFilesCount} -eq 0 ]]
            then
              echo "No java files, do nothing."
            else
              echo ::set-output name=runtests::true
            fi
          fi
      - name: Prerequisites
        if: steps.check.outputs.runtests
        run: |
          sudo apt-get update
          sudo apt-get install -y git wget net-tools
          sudo apt-get install -y tzdata unzip xvfb libxi6
          sudo apt-get install -y default-jdk
          sudo apt-get install -y ${{ env.OS_DEPENDENCIES }}
          sudo apt-get install -y gdb
          curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
          sudo apt-get install -y nodejs
          echo "node.js version = $(node --version), npm version = $(npm --version), release = $(lsb_release -sc)"
      - name: UI-Test-Prerequisites
        if: steps.check.outputs.runtests
        run: |
          wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
          sudo apt-get install -y ./google-chrome-stable_current_amd64.deb
          wget https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip
          unzip chromedriver_linux64.zip
          sudo mv chromedriver /usr/bin/chromedriver
          sudo chown root:root /usr/bin/chromedriver
          sudo chmod +x /usr/bin/chromedriver
          wget https://selenium-release.storage.googleapis.com/3.141/selenium-server-standalone-3.141.59.jar
          wget http://www.java2s.com/Code/JarDownload/testng/testng-6.8.7.jar.zip
          unzip testng-6.8.7.jar.zip
      - name: run
        timeout-minutes: 10 # generous, current runtime is ~1min, this should be increased if new tests are added
        if: steps.check.outputs.runtests
        run: |
          export LANG="en_US.UTF-8"
          sudo update-locale
          source install/opt/HPCCSystems/sbin/hpcc_setenv
          install/opt/HPCCSystems/etc/init.d/hpcc-init start
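          # CLASSPATH must include the Selenium standalone server and TestNG jars downloaded above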
          export CLASSPATH=".:$(realpath selenium-server-standalone-3.141.59.jar):$(realpath testng-6.8.7.jar)"
          pushd install/test-ui
          ./run.sh tests http://localhost:8010 > eclWatchUiTest.log 2>&1
          retCode=$?
          echo "UI test done"
          [[ $retCode -ne 0 ]] && exit 1
          popd
      - name: eclwatch-ui-test-logs-artifact
        if: ${{ failure() || cancelled() }}
        uses: actions/upload-artifact@v2
        with:
          name: ECLWatchUiTest
          path: install/test-ui/eclWatchUiTest.log
          if-no-files-found: error