add vista2d (#618)
Fixes # .

### Description
Add the new `vista2d` bundle to the model zoo, together with single-GPU and multi-GPU CI unit tests (`ci/unit_tests/test_vista2d.py`, `ci/unit_tests/test_vista2d_dist.py`) and an extension of the CI utilities so multi-GPU runs can launch a custom bundle workflow.

### Status
**Ready/Work in progress/Hold**

### Please ensure all the checkboxes:
- [x] Codeformat tests passed locally by running `./runtests.sh
--codeformat`.
- [ ] In-line docstrings updated.
- [ ] Update `version` and `changelog` in `metadata.json` if changing an
existing bundle.
- [ ] Please ensure the naming rules in config files meet our
requirements (please refer to: `CONTRIBUTING.md`).
- [ ] Ensure versions of packages such as `monai`, `pytorch` and `numpy`
are correct in `metadata.json`.
- [ ] Descriptions should be consistent with the content, such as
`eval_metrics` of the provided weights and TorchScript modules.
- [ ] Files larger than 25MB are excluded and replaced by providing
download links in `large_file.yml`.
- [ ] Avoid using paths that contain personal information within config files (such as using `/home/your_name/` for `"bundle_root"`).

---------

Signed-off-by: Yiheng Wang <vennw@nvidia.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
yiheng-wang-nv and pre-commit-ci[bot] authored Aug 23, 2024
1 parent 4dac76f commit d593a05
Showing 29 changed files with 5,151 additions and 7 deletions.
1 change: 1 addition & 0 deletions ci/bundle_custom_data.py
@@ -39,6 +39,7 @@
"brats_mri_axial_slices_generative_diffusion",
"vista3d",
"maisi_ct_generative",
"vista2d",
]

# This list is used for our CI tests to determine whether a bundle needs to be tested after downloading
125 changes: 125 additions & 0 deletions ci/unit_tests/test_vista2d.py
@@ -0,0 +1,125 @@
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
import sys
import tempfile
import unittest

import matplotlib.pyplot as plt
import numpy as np
from monai.bundle import create_workflow
from parameterized import parameterized
from utils import check_workflow

TEST_CASE_TRAIN = [{"bundle_root": "models/vista2d", "mode": "train", "train#trainer#max_epochs": 1}]

TEST_CASE_INFER = [{"bundle_root": "models/vista2d", "mode": "infer"}]


def test_order(test_name1, test_name2):
    def get_order(name):
        if "train" in name:
            return 1
        if "infer" in name:
            return 2
        return 3

    return get_order(test_name1) - get_order(test_name2)


class TestVista2d(unittest.TestCase):
    def setUp(self):
        self.dataset_dir = tempfile.mkdtemp()
        self.tmp_output_dir = os.path.join(self.dataset_dir, "output")
        os.makedirs(self.tmp_output_dir, exist_ok=True)
        self.dataset_size = 5
        input_shape = (256, 256)
        for s in range(self.dataset_size):
            test_image = np.random.randint(low=0, high=2, size=input_shape).astype(np.int8)
            test_label = np.random.randint(low=0, high=2, size=input_shape).astype(np.int8)
            image_filename = os.path.join(self.dataset_dir, f"image_{s}.png")
            label_filename = os.path.join(self.dataset_dir, f"label_{s}.png")
            plt.imsave(image_filename, test_image, cmap="gray")
            plt.imsave(label_filename, test_label, cmap="gray")

        self.bundle_root = "models/vista2d"
        sys.path = [self.bundle_root] + sys.path
        from scripts.workflow import VistaCell

        self.workflow = VistaCell

    def tearDown(self):
        shutil.rmtree(self.dataset_dir)

    @parameterized.expand([TEST_CASE_INFER])
    def test_infer_config(self, override):
        # update override with dataset dir
        override["dataset#data"] = [
            {
                "image": os.path.join(self.dataset_dir, f"image_{s}.png"),
                "label": os.path.join(self.dataset_dir, f"label_{s}.png"),
            }
            for s in range(self.dataset_size)
        ]
        override["output_dir"] = self.tmp_output_dir
        workflow = create_workflow(
            workflow_name=self.workflow,
            config_file=os.path.join(self.bundle_root, "configs/hyper_parameters.yaml"),
            meta_file=os.path.join(self.bundle_root, "configs/metadata.json"),
            **override,
        )

        # check_properties=False, need to add monai service properties later
        check_workflow(workflow, check_properties=False)

        expected_output_file = os.path.join(self.tmp_output_dir, f"image_{self.dataset_size-1}.tif")
        self.assertTrue(os.path.isfile(expected_output_file))

    @parameterized.expand([TEST_CASE_TRAIN])
    def test_train_config(self, override):
        # update override with dataset dir
        override["train#dataset#data"] = [
            {
                "image": os.path.join(self.dataset_dir, f"image_{s}.png"),
                "label": os.path.join(self.dataset_dir, f"label_{s}.png"),
            }
            for s in range(self.dataset_size)
        ]
        override["dataset#data"] = override["train#dataset#data"]

        workflow = create_workflow(
            workflow_name=self.workflow,
            config_file=os.path.join(self.bundle_root, "configs/hyper_parameters.yaml"),
            meta_file=os.path.join(self.bundle_root, "configs/metadata.json"),
            **override,
        )

        # check_properties=False, need to add monai service properties later
        check_workflow(workflow, check_properties=False)

        # follow up to use trained weights and test eval
        override["mode"] = "eval"
        override["pretrained_ckpt_name"] = "model.pt"
        workflow = create_workflow(
            workflow_name=self.workflow,
            config_file=os.path.join(self.bundle_root, "configs/hyper_parameters.yaml"),
            meta_file=os.path.join(self.bundle_root, "configs/metadata.json"),
            **override,
        )
        check_workflow(workflow, check_properties=False)


if __name__ == "__main__":
    loader = unittest.TestLoader()
    loader.sortTestMethodsUsing = test_order
    unittest.main(testLoader=loader)
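
As an aside, the `create_workflow` pattern exercised by these tests can also be driven directly from Python. The following is only a rough sketch, not part of the diff: it assumes the repository root as the working directory, placeholder output/data paths, and that `VistaCell` follows the standard `BundleWorkflow` initialize/run/finalize protocol.

import os
import sys

from monai.bundle import create_workflow

bundle_root = "models/vista2d"  # assumption: run from the repository root
sys.path.insert(0, bundle_root)
from scripts.workflow import VistaCell  # custom workflow class shipped with the bundle

override = {
    "bundle_root": bundle_root,
    "mode": "infer",
    "output_dir": "./vista2d_output",  # placeholder output directory
    # "dataset#data": [{"image": "...", "label": "..."}],  # optional data override, as in the test above
}
workflow = create_workflow(
    workflow_name=VistaCell,
    config_file=os.path.join(bundle_root, "configs/hyper_parameters.yaml"),
    meta_file=os.path.join(bundle_root, "configs/metadata.json"),
    **override,
)
workflow.initialize()
workflow.run()
workflow.finalize()
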
70 changes: 70 additions & 0 deletions ci/unit_tests/test_vista2d_dist.py
@@ -0,0 +1,70 @@
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
import sys
import tempfile
import unittest

import matplotlib.pyplot as plt
import numpy as np
import torch
from parameterized import parameterized
from utils import export_config_and_run_mgpu_cmd

TEST_CASE_TRAIN_MGPU = [{"bundle_root": "models/vista2d", "workflow_type": "train", "train#trainer#max_epochs": 2}]


class TestVista2d(unittest.TestCase):
    def setUp(self):
        self.dataset_dir = tempfile.mkdtemp()
        self.dataset_size = 5
        input_shape = (256, 256)
        for s in range(self.dataset_size):
            test_image = np.random.randint(low=0, high=2, size=input_shape).astype(np.int8)
            test_label = np.random.randint(low=0, high=2, size=input_shape).astype(np.int8)
            image_filename = os.path.join(self.dataset_dir, f"image_{s}.png")
            label_filename = os.path.join(self.dataset_dir, f"label_{s}.png")
            plt.imsave(image_filename, test_image, cmap="gray")
            plt.imsave(label_filename, test_label, cmap="gray")

        self.bundle_root = "models/vista2d"
        sys.path = [self.bundle_root] + sys.path

    def tearDown(self):
        shutil.rmtree(self.dataset_dir)

    @parameterized.expand([TEST_CASE_TRAIN_MGPU])
    def test_train_mgpu_config(self, override):
        override["train#dataset#data"] = [
            {
                "image": os.path.join(self.dataset_dir, f"image_{s}.png"),
                "label": os.path.join(self.dataset_dir, f"label_{s}.png"),
            }
            for s in range(self.dataset_size)
        ]
        override["dataset#data"] = override["train#dataset#data"]

        output_path = os.path.join(self.bundle_root, "configs/train_override.json")
        n_gpu = torch.cuda.device_count()
        export_config_and_run_mgpu_cmd(
            config_file=os.path.join(self.bundle_root, "configs/hyper_parameters.yaml"),
            meta_file=os.path.join(self.bundle_root, "configs/metadata.json"),
            custom_workflow="scripts.workflow.VistaCell",
            override_dict=override,
            output_path=output_path,
            ngpu=n_gpu,
        )


if __name__ == "__main__":
    unittest.main()
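
For orientation, the helper invoked above writes the overridden config to `train_override.json` and then launches a `torchrun` command built by `produce_custom_workflow_mgpu_cmd` (see the `utils.py` diff below). A hand-assembled sketch of roughly what gets launched, with the GPU count and paths treated as assumptions:

import os
import subprocess

import torch

n_gpu = torch.cuda.device_count()
cmd = [
    "torchrun",
    "--standalone",
    "--nnodes=1",
    f"--nproc_per_node={n_gpu}",
    "-m",
    "monai.bundle",
    "run_workflow",
    "scripts.workflow.VistaCell",
    "--config_file",
    "models/vista2d/configs/train_override.json",  # the exported, overridden config
    "--meta_file",
    "models/vista2d/configs/metadata.json",
]
env = {**os.environ, "PYTHONPATH": "models/vista2d"}  # so scripts.workflow is importable in the subprocesses
subprocess.run(cmd, check=True, env=env)
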
47 changes: 40 additions & 7 deletions ci/unit_tests/utils.py
@@ -23,7 +23,7 @@ def export_overrided_config(config_file, override_dict, output_path):
    ConfigParser.export_config_file(parser.config, output_path, indent=4)


-def produce_mgpu_cmd(config_file, meta_file, logging_file, nnodes=1, nproc_per_node=2):
+def produce_mgpu_cmd(config_file, meta_file, logging_file=None, nnodes=1, nproc_per_node=2):
    cmd = [
        "torchrun",
        "--standalone",
@@ -34,20 +34,43 @@ def produce_mgpu_cmd(config_file, meta_file, logging_file, nnodes=1, nproc_per_node=2):
        "run",
        "--config_file",
        config_file,
-        "--logging_file",
-        logging_file,
        "--meta_file",
        meta_file,
    ]
+    if logging_file is not None:
+        cmd.extend(["--logging_file", logging_file])
    return cmd


+def produce_custom_workflow_mgpu_cmd(
+    custom_workflow, config_file, meta_file, logging_file=None, nnodes=1, nproc_per_node=2
+):
+    cmd = [
+        "torchrun",
+        "--standalone",
+        f"--nnodes={nnodes}",
+        f"--nproc_per_node={nproc_per_node}",
+        "-m",
+        "monai.bundle",
+        "run_workflow",
+        custom_workflow,
+        "--config_file",
+        config_file,
+        "--meta_file",
+        meta_file,
+    ]
+    if logging_file is not None:
+        cmd.extend(["--logging_file", logging_file])
+    return cmd


def export_config_and_run_mgpu_cmd(
    config_file,
    meta_file,
-    logging_file,
    override_dict,
    output_path,
+    custom_workflow=None,
+    logging_file=None,
    workflow_type="train",
    nnode=1,
    ngpu=2,
@@ -68,9 +91,19 @@ def export_config_and_run_mgpu_cmd(
    check_result = engine.check_properties()
    if check_result is not None and len(check_result) > 0:
        raise ValueError(f"check properties for overrided mgpu configs failed: {check_result}")
-    cmd = produce_mgpu_cmd(
-        config_file=output_path, meta_file=meta_file, logging_file=logging_file, nnodes=nnode, nproc_per_node=ngpu
-    )
+    if custom_workflow is None:
+        cmd = produce_mgpu_cmd(
+            config_file=output_path, meta_file=meta_file, logging_file=logging_file, nnodes=nnode, nproc_per_node=ngpu
+        )
+    else:
+        cmd = produce_custom_workflow_mgpu_cmd(
+            custom_workflow=custom_workflow,
+            config_file=output_path,
+            meta_file=meta_file,
+            logging_file=logging_file,
+            nnodes=nnode,
+            nproc_per_node=ngpu,
+        )
    env = os.environ.copy()
    # ensure customized library can be loaded in subprocess
    env["PYTHONPATH"] = override_dict.get("bundle_root", ".")
