Skip to content

Commit

Permalink
Use reusable test workflow
Browse files Browse the repository at this point in the history
  • Loading branch information
jwallwork23 committed Nov 23, 2024
1 parent 8c7df8c commit eab819e
Show file tree
Hide file tree
Showing 4 changed files with 398 additions and 54 deletions.
75 changes: 21 additions & 54 deletions .github/workflows/test_suite.yml
Original file line number Diff line number Diff line change
@@ -1,63 +1,30 @@
name: Install and test UM2N
name: 'Run UM2N Test Suite'

on:
# Run test suite whenever main is updated
push:
branches:
- main

# Run test suite whenever commits are pushed to an open PR
pull_request:

jobs:
test-warpmesh:
name: Test UM2N
runs-on: ubuntu-latest
container:
image: firedrakeproject/firedrake:latest
options: --user root
steps:
- uses: actions/checkout@v3

- name: Cleanup
if: ${{ always() }}
run: |
cd ..
rm -rf build
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.8

- name: Lint check
if: ${{ always() }}
run: |
. /home/firedrake/firedrake/bin/activate
python3 -m pip install ruff
ruff check
- name: Install Movement
run: |
. /home/firedrake/firedrake/bin/activate
git clone https://github.com/mesh-adaptation/movement.git
cd movement
python3 -m pip install -e .
# Run test suite every Sunday at 1AM
schedule:
- cron: '0 1 * * 0'

- name: Install PyTorch
run: |
. /home/firedrake/firedrake/bin/activate
python3 -m pip install torch --index-url https://download.pytorch.org/whl/cpu
- name: Install PyTorch3d
run: |
. /home/firedrake/firedrake/bin/activate
python3 -m pip install 'git+https://github.com/facebookresearch/pytorch3d.git'
- name: Install UM2N
run: |
. /home/firedrake/firedrake/bin/activate
python3 -m pip install -e .
- name: Run UM2N test suite
run: |
. /home/firedrake/firedrake/bin/activate
python3 -m pytest tests/test* -v
jobs:
test_suite:
uses: mesh-adaptation/mesh-adaptation-docs/.github/workflows/reusable_test_suite.yml@main
with:
install-command: 'python -m pip uninstall -y UM2N && python -m pip install -e .'
test-command: |
export GITHUB_ACTIONS_TEST_RUN=1
python $(which firedrake-clean)
python -m coverage erase
python -m coverage run -a --source=UM2N -m pytest -v --durations=20 test
python -m coverage report
changed-files-patterns: |
**/*.py
**/*.msh
**/*.geo
228 changes: 228 additions & 0 deletions test/dataset_integrity_check.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,228 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import warnings\n",
"\n",
"# import glob\n",
"import torch\n",
"from torch_geometric.utils import index_to_mask\n",
"\n",
"import UM2N\n",
"\n",
"warnings.filterwarnings('ignore')\n",
"device = torch.device('cuda' if torch.cuda.is_available()\n",
"else 'cpu')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def check_phi_attached(data_path):\n",
" data_set = UM2N.MeshDataset(data_path)\n",
" for i in range(len(data_set)):\n",
" try:\n",
" assert hasattr(data_set[i], 'phi')\n",
" except AttributeError:\n",
" raise ValueError(\"[NO PHI]: \", data_path)\n",
" return\n",
"\n",
"data_type = [\n",
" \"iso_full\",\n",
" \"iso_pad\",\n",
" \"aniso_full\",\n",
" \"aniso_pad\",\n",
"]\n",
"\n",
"subset_name = [\n",
" \"data\",\n",
"]\n",
"\n",
"\n",
"n_grid_start = 15\n",
"n_grid_end = 35\n",
"\n",
"data_type = [\n",
" \"iso_full\",\n",
" \"iso_pad\",\n",
" \"aniso_full\",\n",
" \"aniso_pad\",\n",
"]\n",
"\n",
"subset_name = [\n",
" \"data\",\n",
"]\n",
"\n",
"\n",
"# check for validation set\n",
"for n_grid in range(n_grid_start, n_grid_end + 1):\n",
" for data in data_type:\n",
" for subset in subset_name:\n",
" base_path = (\n",
" f\"/Users/cw1722/Documents/warpmesh/data/dataset/helmholtz/z=<0,1>_ndist=None_max_dist=6_<{n_grid}x{n_grid}>_n=100_{data}/{subset}\"\n",
" )\n",
" try:\n",
" check_phi_attached(base_path)\n",
"            print(f\"success: check for {base_path}\")\n",
" except ValueError:\n",
" print(f\"failed: check for {base_path}\")\n",
" print()\n",
"\n",
"\n",
"n_grid_start = 15\n",
"n_grid_end = 20\n",
"\n",
"data_type = [\n",
" \"iso_pad\",\n",
" \"aniso_full\",\n",
"]\n",
"\n",
"subset_name = [\n",
" \"data\",\n",
"]\n",
"\n",
"# check for training set\n",
"for n_grid in range(n_grid_start, n_grid_end + 1, 5):\n",
" for data in data_type:\n",
" for subset in subset_name:\n",
" base_path = (\n",
" f\"/Users/cw1722/Documents/warpmesh/data/dataset/helmholtz/z=<0,1>_ndist=None_max_dist=6_<{n_grid}x{n_grid}>_n=100_{data}/{subset}\"\n",
" )\n",
" try:\n",
" check_phi_attached(base_path)\n",
" except ValueError:\n",
" print(f\"failed: check for {base_path}\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def check_neighbour(data_path, source_idx=3):\n",
" # file_pattern = os.path.join(data_path, 'data_*.npy')\n",
" # files = glob.glob(file_pattern)\n",
" # for file in files:\n",
" data_set = UM2N.MeshDataset(data_path)\n",
" for i in range(len(data_set)):\n",
" coords = data_set[i].x[:, :2]\n",
" num_nodes = coords.shape[0]\n",
" source_mask = index_to_mask(\n",
" torch.tensor([source_idx]), num_nodes\n",
" )\n",
" nei = UM2N.get_neighbors(source_mask, data_set[i].edge_index)\n",
" if (nei.sum() == 6):\n",
" pass\n",
" else:\n",
" raise ValueError(f\"In dataset {data_path} The number of neighbors is not 6\")\n",
" return\n",
"\n",
"\n",
"check_neighbour(\"/Users/cw1722/Documents/warpmesh/data/helmholtz/z=<0,1>_ndist=None_max_dist=6_<16x16>_n=100_iso_full/data\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"n_grid_start = 15\n",
"n_grid_end = 35\n",
"\n",
"data_type = [\n",
" \"iso_full\",\n",
" \"iso_pad\",\n",
" \"aniso_full\",\n",
" \"aniso_pad\",\n",
"]\n",
"\n",
"subset_name = [\n",
" \"data\",\n",
"]\n",
"\n",
"\n",
"# check for validation set\n",
"for n_grid in range(n_grid_start, n_grid_end + 1):\n",
" for data in data_type:\n",
" for subset in subset_name:\n",
" base_path = (\n",
" f\"/Users/cw1722/Documents/warpmesh/data/dataset/helmholtz/z=<0,1>_ndist=None_max_dist=6_<{n_grid}x{n_grid}>_n=100_{data}/{subset}\"\n",
" )\n",
"            print(f\"checking: {base_path}\")\n",
" check_neighbour(base_path)\n",
"\n",
"\n",
"n_grid_start = 15\n",
"n_grid_end = 20\n",
"\n",
"data_type = [\n",
" \"iso_pad\",\n",
" \"aniso_full\",\n",
"]\n",
"\n",
"subset_name = [\n",
" \"data\",\n",
"]\n",
"\n",
"# check for training set\n",
"for n_grid in range(n_grid_start, n_grid_end + 1, 5):\n",
" for data in data_type:\n",
" for subset in subset_name:\n",
" base_path = (\n",
" f\"/Users/cw1722/Documents/warpmesh/data/dataset/helmholtz/z=<0,1>_ndist=None_max_dist=6_<{n_grid}x{n_grid}>_n=100_{data}/{subset}\"\n",
" )\n",
"            print(f\"checking: {base_path}\")\n",
" check_neighbour(base_path)\n",
"\n",
"print(\"All checks passed!\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
31 changes: 31 additions & 0 deletions test/test_import.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# Author: Chunyang Wang
# GitHub Username: acse-cw1722

from pytest import fixture


@fixture(scope="module")
def UM2N():
    """Import the UM2N package once per test module and provide it to tests."""
    import UM2N as um2n_module

    return um2n_module


@fixture(scope="module")
def firedrake():
    """Import the firedrake package once per test module and provide it to tests."""
    import firedrake as firedrake_module

    return firedrake_module


@fixture(scope="module")
def movement():
    """Import the movement package once per test module and provide it to tests."""
    import movement as movement_module

    return movement_module


def test_import(UM2N, firedrake, movement):
assert UM2N
assert firedrake
assert movement
Loading

0 comments on commit eab819e

Please sign in to comment.