Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

551 Fix premerge py version issue #552

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 2 additions & 4 deletions .github/workflows/premerge-cpu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.8
python-version: 3.9
- name: cache weekly timestamp
id: pip-cache
run: |
Expand All @@ -34,8 +34,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
python -m pip install -r requirements-dev.txt
python -m pip install cffi && python -m pip install pipenv
- name: check
run: |
# clean up temporary files
Expand Down
2 changes: 1 addition & 1 deletion ci/get_bundle_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from utils import get_sub_folders

# newly added bundles should temporarily be added to this list, and removed once they can be downloaded successfully
EXCLUDE_LIST = ["segmentation_template", "classification_template"]
EXCLUDE_LIST = []


def main(models_path):
Expand Down
6 changes: 5 additions & 1 deletion ci/run_premerge_cpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,11 @@ elif [[ $# -gt 1 ]]; then
fi

verify_bundle() {
rm -rf /opt/hostedtoolcache
for dir in /opt/hostedtoolcache/*; do
if [[ $dir != "/opt/hostedtoolcache/Python" ]]; then
rm -rf "$dir"
fi
done
echo 'Run verify bundle...'
pip install -r requirements.txt
head_ref=$(git rev-parse HEAD)
Expand Down
6 changes: 3 additions & 3 deletions ci/unit_tests/test_spleen_deepedit_annotation.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,9 @@ def test_infer_config(self, override):
@parameterized.expand([TEST_CASE_2])
def test_infer_click_config(self, override):
override["dataset_dir"] = self.dataset_dir
override[
"dataset#data"
] = "$[{'image': i, 'background': [], 'spleen': [[6, 6, 6], [8, 8, 8]]} for i in @datalist]"
override["dataset#data"] = (
"$[{'image': i, 'background': [], 'spleen': [[6, 6, 6], [8, 8, 8]]} for i in @datalist]"
)
bundle_root = override["bundle_root"]
sys.path = [bundle_root] + sys.path

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
{
"schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
"version": "0.2.3",
"version": "0.2.4",
"changelog": {
"0.2.4": "fix black 24.1 format error",
"0.2.3": "update AddChanneld with EnsureChannelFirstd and remove meta_dict",
"0.2.2": "add name tag",
"0.2.1": "fix license Copyright error",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,7 @@ def drop_block_2d(
total_size = w * h
clipped_block_size = min(block_size, min(w, h))
# seed_drop_rate, the gamma parameter
gamma = (
gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))
)
gamma = gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))

# Forces the block to be inside the feature map.
w_i, h_i = torch.meshgrid(torch.arange(w).to(x.device), torch.arange(h).to(x.device))
Expand Down Expand Up @@ -89,9 +87,7 @@ def drop_block_fast_2d(
b, c, h, w = x.shape
total_size = w * h
clipped_block_size = min(block_size, min(w, h))
gamma = (
gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))
)
gamma = gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))

block_mask = torch.empty_like(x).bernoulli_(gamma)
block_mask = F.max_pool2d(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
{
"schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
"version": "0.2.3",
"version": "0.2.4",
"changelog": {
"0.2.4": "fix black 24.1 format error",
"0.2.3": "fix PYTHONPATH in readme.md",
"0.2.2": "add name tag",
"0.2.1": "fix license Copyright error",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,7 @@ def drop_block_2d(
total_size = w * h
clipped_block_size = min(block_size, min(w, h))
# seed_drop_rate, the gamma parameter
gamma = (
gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))
)
gamma = gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))

# Forces the block to be inside the feature map.
w_i, h_i = torch.meshgrid(torch.arange(w).to(x.device), torch.arange(h).to(x.device))
Expand Down Expand Up @@ -89,9 +87,7 @@ def drop_block_fast_2d(
b, c, h, w = x.shape
total_size = w * h
clipped_block_size = min(block_size, min(w, h))
gamma = (
gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))
)
gamma = gamma_scale * drop_prob * total_size / clipped_block_size**2 / ((w - block_size + 1) * (h - block_size + 1))

block_mask = torch.empty_like(x).bernoulli_(gamma)
block_mask = F.max_pool2d(
Expand Down
Loading