From ece832f6d3cd3f2582ea6fd37ecd2bf6d302205f Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:14:43 -0700 Subject: [PATCH 01/36] Add batch_c15n for [0,1] image input and imagenet-normalized input. --- mart/configs/batch_c15n/image_01.yaml | 6 ++++++ .../configs/batch_c15n/imagenet_normalized.yaml | 6 ++++++ .../batch_c15n/transform/255_to_imagenet.yaml | 4 ++++ .../batch_c15n/transform/divided_by_255.yaml | 3 +++ .../batch_c15n/transform/imagenet_to_255.yaml | 17 +++++++++++++++++ .../transform/times_255_and_round.yaml | 13 +++++++++++++ 6 files changed, 49 insertions(+) create mode 100644 mart/configs/batch_c15n/image_01.yaml create mode 100644 mart/configs/batch_c15n/imagenet_normalized.yaml create mode 100644 mart/configs/batch_c15n/transform/255_to_imagenet.yaml create mode 100644 mart/configs/batch_c15n/transform/divided_by_255.yaml create mode 100644 mart/configs/batch_c15n/transform/imagenet_to_255.yaml create mode 100644 mart/configs/batch_c15n/transform/times_255_and_round.yaml diff --git a/mart/configs/batch_c15n/image_01.yaml b/mart/configs/batch_c15n/image_01.yaml new file mode 100644 index 00000000..f3f8e758 --- /dev/null +++ b/mart/configs/batch_c15n/image_01.yaml @@ -0,0 +1,6 @@ +defaults: + - list + - transform: times_255_and_round + - transform@untransform: divided_by_255 + +input_key: 0 diff --git a/mart/configs/batch_c15n/imagenet_normalized.yaml b/mart/configs/batch_c15n/imagenet_normalized.yaml new file mode 100644 index 00000000..e41fb3ff --- /dev/null +++ b/mart/configs/batch_c15n/imagenet_normalized.yaml @@ -0,0 +1,6 @@ +defaults: + - dict + - transform: imagenet_to_255 + - transform@untransform: 255_to_imagenet + +input_key: image diff --git a/mart/configs/batch_c15n/transform/255_to_imagenet.yaml b/mart/configs/batch_c15n/transform/255_to_imagenet.yaml new file mode 100644 index 00000000..9bb9ebef --- /dev/null +++ b/mart/configs/batch_c15n/transform/255_to_imagenet.yaml @@ -0,0 +1,4 @@ +_target_: torchvision.transforms.Normalize +# from 0-1 scale statistics: mean=[0.485, 0.456, 0.406]*255 std=[0.229, 0.224, 0.225]*255 +mean: [123.6750, 116.2800, 103.5300] +std: [58.3950, 57.1200, 57.3750] diff --git a/mart/configs/batch_c15n/transform/divided_by_255.yaml b/mart/configs/batch_c15n/transform/divided_by_255.yaml new file mode 100644 index 00000000..92a63b7c --- /dev/null +++ b/mart/configs/batch_c15n/transform/divided_by_255.yaml @@ -0,0 +1,3 @@ +_target_: torchvision.transforms.Normalize +mean: 0 +std: 255 diff --git a/mart/configs/batch_c15n/transform/imagenet_to_255.yaml b/mart/configs/batch_c15n/transform/imagenet_to_255.yaml new file mode 100644 index 00000000..66a4ef53 --- /dev/null +++ b/mart/configs/batch_c15n/transform/imagenet_to_255.yaml @@ -0,0 +1,17 @@ +_target_: torchvision.transforms.Compose +transforms: + - _target_: mart.transforms.Denormalize + # from 0-1 scale statistics: mean=[0.485, 0.456, 0.406]*255 std=[0.229, 0.224, 0.225]*255 + center: + _target_: torch.as_tensor + data: [123.6750, 116.2800, 103.5300] + scale: + _target_: torch.as_tensor + data: [58.3950, 57.1200, 57.3750] + - _target_: torch.fake_quantize_per_tensor_affine + _partial_: true + # (x/1+0).round().clamp(0, 255) * 1 + scale: 1 + zero_point: 0 + quant_min: 0 + quant_max: 255 diff --git a/mart/configs/batch_c15n/transform/times_255_and_round.yaml b/mart/configs/batch_c15n/transform/times_255_and_round.yaml new file mode 100644 index 00000000..dbeff64d --- /dev/null +++ b/mart/configs/batch_c15n/transform/times_255_and_round.yaml @@ -0,0 +1,13 @@ +_target_: 
torchvision.transforms.Compose +transforms: + - _target_: mart.transforms.Denormalize + center: 0 + scale: 255 + # Fix potential numeric error. + - _target_: torch.fake_quantize_per_tensor_affine + _partial_: true + # (x/1+0).round().clamp(0, 255) * 1 + scale: 1 + zero_point: 0 + quant_min: 0 + quant_max: 255 From 726a0af0cc93120d7acf329cf841e49566d2b583 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:18:27 -0700 Subject: [PATCH 02/36] Turn off inference mode before creating perturbations. --- mart/attack/adversary.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mart/attack/adversary.py b/mart/attack/adversary.py index 9fbdec75..1555620b 100644 --- a/mart/attack/adversary.py +++ b/mart/attack/adversary.py @@ -151,6 +151,8 @@ def configure_gradient_clipping( for group in optimizer.param_groups: self.gradient_modifier(group["params"]) + # Turn off the inference mode, so we will create perturbations that require gradient. + @torch.inference_mode(False) @silent() def fit(self, input, target, *, model: Callable): # The attack also needs access to the model at every iteration. From b0c307970a07264a2125595d02c9b0b6256982e1 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:18:51 -0700 Subject: [PATCH 03/36] Switch to training mode before running LightningModule.training_step(). --- mart/callbacks/adversary_connector.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/mart/callbacks/adversary_connector.py b/mart/callbacks/adversary_connector.py index a35b225b..db8fe7b0 100644 --- a/mart/callbacks/adversary_connector.py +++ b/mart/callbacks/adversary_connector.py @@ -7,7 +7,7 @@ from __future__ import annotations import types -from typing import Callable +from typing import Any, Callable from lightning.pytorch.callbacks import Callback @@ -16,6 +16,20 @@ __all__ = ["AdversaryConnector"] +class training_mode: + """A context that switches a torch.nn.Module object to the training mode.""" + + def __init__(self, module): + self.module = module + self.training = self.module.training + + def __enter__(self): + self.module.train(True) + + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any): + self.module.train(self.training) + + class AdversaryConnector(Callback): """Perturbs inputs to be adversarial.""" @@ -81,7 +95,9 @@ def model(input, target): # LightningModule must have "training_step". # Disable logging if we have to reuse training_step() of the target model. with MonkeyPatch(pl_module, "log", lambda *args, **kwargs: None): - outputs = pl_module.training_step(batch, dataloader_idx) + # Switch the model to the training mode so training_step works as expected. + with training_mode(pl_module): + outputs = pl_module.training_step(batch, dataloader_idx) return outputs # Canonicalize the batch to work with Adversary. From eba3c2bb6ca988ebc7f1ea9171a2f4f28238ee44 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:21:16 -0700 Subject: [PATCH 04/36] Add utils for config instantiation. --- mart/utils/config.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/mart/utils/config.py b/mart/utils/config.py index 36dbb6e6..3a4268ad 100644 --- a/mart/utils/config.py +++ b/mart/utils/config.py @@ -8,14 +8,17 @@ import os +import hydra from hydra import compose as hydra_compose from hydra import initialize_config_dir +from lightning.pytorch.callbacks.callback import Callback +from omegaconf import OmegaConf DEFAULT_VERSION_BASE = "1.2" DEFAULT_CONFIG_DIR = "."
DEFAULT_CONFIG_NAME = "lightning.yaml" -__all__ = ["compose"] +__all__ = ["compose", "instantiate", "Instantiator", "CallbackInstantiator"] def compose( @@ -40,3 +43,28 @@ def compose( cfg = cfg[key] return cfg + + +def instantiate(cfg_path): + """Instantiate an object from a Hydra yaml config file.""" + config = OmegaConf.load(cfg_path) + obj = hydra.utils.instantiate(config) + return obj + + +class Instantiator: + def __new__(cls, cfg_path): + return instantiate(cfg_path) + + +class CallbackInstantiator(Callback): + """Type checking for Lightning Callback.""" + + def __new__(cls, cfg_path): + obj = instantiate(cfg_path) + if isinstance(obj, Callback): + return obj + else: + raise ValueError( + f"We expect to instantiate a lightning Callback from {cfg_path}, but we get {type(obj)} instead." + ) From dd10c795b4a7101796c1803dde21816c01f4de37 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:27:44 -0700 Subject: [PATCH 05/36] Add mart.utils.Get() to extract a value from kwargs dict. --- mart/utils/utils.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/mart/utils/utils.py b/mart/utils/utils.py index f4a0a4ec..dbf3fb21 100644 --- a/mart/utils/utils.py +++ b/mart/utils/utils.py @@ -28,6 +28,7 @@ "save_file", "task_wrapper", "flatten_dict", + "Get", ] log = pylogger.get_pylogger(__name__) @@ -293,3 +294,13 @@ def get_dottedpath_items(d: dict, parent: Optional[str] = None): ret[key] = value return ret + + +class Get: + """Get a value from the kwargs dictionary by key.""" + + def __init__(self, key): + self.key = key + + def __call__(self, **kwargs): + return kwargs[self.key] From 43d63f54adb0c596c80eb2a49d577b8430892485 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 09:35:55 -0700 Subject: [PATCH 06/36] Upgrade Lightning from 2.0 to 2.1. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6599ba3a..cce09539 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ requires-python = ">=3.9" dependencies = [ "torch >= 2.0.1", "torchvision >= 0.15.2", - "lightning[extra] ~= 2.0.5", # Full functionality including TensorboardX. + "lightning[extra] ~= 2.1.4", # Full functionality including TensorboardX. "pydantic == 1.10.11", # https://github.com/Lightning-AI/lightning/pull/18022/files "torchmetrics == 1.0.1", "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 From f4f1f1aa89776d23eab6c042a5e6e1bd8fc8637c Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 11:18:51 -0700 Subject: [PATCH 07/36] Move most dependencies to optional groups. --- pyproject.toml | 45 ++++++++++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cce09539..7703d776 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,32 +11,13 @@ authors = [ requires-python = ">=3.9" dependencies = [ - "torch >= 2.0.1", - "torchvision >= 0.15.2", - "lightning[extra] ~= 2.1.4", # Full functionality including TensorboardX. 
- "pydantic == 1.10.11", # https://github.com/Lightning-AI/lightning/pull/18022/files - "torchmetrics == 1.0.1", - "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 - # --------- hydra --------- # "hydra-core ~= 1.2.0", "hydra-colorlog ~= 1.2.0", "hydra-optuna-sweeper ~= 1.2.0", - # --------- loggers --------- # - # wandb - # neptune-client - # mlflow - # comet-ml - # --------- others --------- # "pyrootutils ~= 1.0.4", # standardizing the project root setup - "rich ~= 12.6.0", # beautiful text formatting in terminal - "timm ~= 0.6.11", # pytorch image models - - # ----- object detection----- # - "pycocotools ~= 2.0.5", - "fire == 0.5.0", ] @@ -47,6 +28,31 @@ Source = "https://github.com/IntelLabs/MART" mart = "mart.__main__:main" [project.optional-dependencies] +core = [ + "torch >= 2.0.1", + "torchvision >= 0.15.2", + "lightning[extra] ~= 2.1.4", # Full functionality including TensorboardX. + "pydantic == 1.10.11", # https://github.com/Lightning-AI/lightning/pull/18022/files + "torchmetrics >= 1.3.2", + "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 +] + +loggers = [ + "wandb", + "neptune-client", + "mlflow", + "comet-ml", +] + +models = [ + "timm ~= 0.6.11", # pytorch image models +] + +objdet = [ + # ----- object detection----- # + "pycocotools ~= 2.0.5", +] + developer = [ "pre-commit ~= 2.20.0", # hooks for applying linters on commit "pytest ~= 7.2.0", # tests @@ -61,6 +67,7 @@ fiftyone = [ ] extras = [ + "rich ~= 12.6.0", # beautiful text formatting in terminal ] [tool.setuptools] From a18b691303d783e4eb5f1c28bc155a154dcd18be Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 13:18:01 -0700 Subject: [PATCH 08/36] Avoid importing everything in mart. --- mart/__init__.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/mart/__init__.py b/mart/__init__.py index 85181105..2c02b644 100644 --- a/mart/__init__.py +++ b/mart/__init__.py @@ -1,11 +1,3 @@ -import importlib - -from mart import attack as attack -from mart import datamodules as datamodules -from mart import models as models -from mart import nn as nn -from mart import optim as optim -from mart import transforms as transforms -from mart import utils as utils +import importlib.metadata __version__ = importlib.metadata.version(__package__ or __name__) From 6c8a99dcd3a8598f273f656e7bbd1a93e1ec8d7a Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 2 May 2024 13:21:21 -0700 Subject: [PATCH 09/36] Make a separate naming space mart.transforms.objdet for object detection transforms. --- mart/transforms/__init__.py | 3 ++ mart/transforms/extended.py | 74 ------------------------------ mart/transforms/objdet.py | 91 +++++++++++++++++++++++++++++++++++++ 3 files changed, 94 insertions(+), 74 deletions(-) create mode 100644 mart/transforms/objdet.py diff --git a/mart/transforms/__init__.py b/mart/transforms/__init__.py index 8f30723a..19acd8c7 100644 --- a/mart/transforms/__init__.py +++ b/mart/transforms/__init__.py @@ -7,3 +7,6 @@ from .batch_c15n import * # noqa: F403 from .extended import * # noqa: F403 from .transforms import * # noqa: F403 + +# We don't import .objdet here, because we may not install the object detection related packages, such as pycocotools. 
+# from .objdet import * diff --git a/mart/transforms/extended.py b/mart/transforms/extended.py index ba827865..41595383 100644 --- a/mart/transforms/extended.py +++ b/mart/transforms/extended.py @@ -6,19 +6,12 @@ import logging import os -from typing import Dict, Optional, Tuple import numpy as np import torch from PIL import Image, ImageOps -from torch import Tensor -from torchvision.transforms import functional as F from torchvision.transforms import transforms as T -# FIXME: We really shouldn't be importing private functions -from .torchvision_ref import ConvertCocoPolysToMask as ConvertCocoPolysToMask_ -from .torchvision_ref import _flip_coco_person_keypoints - logger = logging.getLogger(__name__) __all__ = [ @@ -28,9 +21,6 @@ "SplitLambda", "LoadPerturbableMask", "LoadCoords", - "ConvertInstanceSegmentationToPerturbable", - "RandomHorizontalFlip", - "ConvertCocoPolysToMask", ] @@ -113,17 +103,6 @@ def __call__(self, tensor, **kwargs): return tensor -class ConvertInstanceSegmentationToPerturbable(ExTransform): - """Merge all instance masks and reverse.""" - - def __call__(self, image, target): - perturbable_mask = torch.sum(target["masks"], dim=0) == 0 - # Convert to float to be differentiable. - target["perturbable_mask"] = perturbable_mask.float() - - return image, target - - class LoadPerturbableMask(ExTransform): """Load perturbable masks and add to target.""" @@ -158,56 +137,3 @@ def __call__(self, image, target): # Convert to float to be differentiable. target["coords"] = coords return image, target - - -class RandomHorizontalFlip(T.RandomHorizontalFlip, ExTransform): - """Flip the image and annotations including boxes, masks, keypoints and the - perturable_masks.""" - - @staticmethod - def flip_boxes(image, target): - width, _ = F.get_image_size(image) - target["boxes"][:, [0, 2]] = width - target["boxes"][:, [2, 0]] - return image, target - - @staticmethod - def flip_masks(image, target): - target["masks"] = target["masks"].flip(-1) - return image, target - - @staticmethod - def flip_keypoints(image, target): - width, _ = F.get_image_size(image) - keypoints = target["keypoints"] - keypoints = _flip_coco_person_keypoints(keypoints, width) - target["keypoints"] = keypoints - return image, target - - @staticmethod - def flip_perturable_masks(image, target): - target["masks"] = target["masks"].flip(-1) - return image, target - - @staticmethod - def flip_perturbable_mask(image, target): - target["perturbable_mask"] = target["perturbable_mask"].flip(-1) - return image, target - - def forward( - self, image: Tensor, target: Optional[Dict[str, Tensor]] = None - ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: - if torch.rand(1) < self.p: - image = F.hflip(image) - if target is not None: - image, target = self.flip_boxes(image, target) - if "masks" in target: - image, target = self.flip_masks(image, target) - if "keypoints" in target: - image, target = self.flip_keypoints(image, target) - if "perturbable_mask" in target: - image, target = self.flip_perturable_masks(image, target) - return image, target - - -class ConvertCocoPolysToMask(ConvertCocoPolysToMask_, ExTransform): - pass diff --git a/mart/transforms/objdet.py b/mart/transforms/objdet.py new file mode 100644 index 00000000..68a7686f --- /dev/null +++ b/mart/transforms/objdet.py @@ -0,0 +1,91 @@ +# +# Copyright (C) 2022 Intel Corporation +# +# SPDX-License-Identifier: BSD-3-Clause +# + +import logging +from typing import Dict, Optional, Tuple + +import torch +from torch import Tensor +from torchvision.transforms import 
functional as F +from torchvision.transforms import transforms as T + +from .extended import ExTransform + +# FIXME: We really shouldn't be importing private functions +from .torchvision_ref import ConvertCocoPolysToMask as ConvertCocoPolysToMask_ +from .torchvision_ref import _flip_coco_person_keypoints + +logger = logging.getLogger(__name__) + +__all__ = [ + "ConvertInstanceSegmentationToPerturbable", + "RandomHorizontalFlip", + "ConvertCocoPolysToMask", +] + + +class ConvertInstanceSegmentationToPerturbable(ExTransform): + """Merge all instance masks and reverse.""" + + def __call__(self, image, target): + perturbable_mask = torch.sum(target["masks"], dim=0) == 0 + # Convert to float to be differentiable. + target["perturbable_mask"] = perturbable_mask.float() + + return image, target + + +class RandomHorizontalFlip(T.RandomHorizontalFlip, ExTransform): + """Flip the image and annotations including boxes, masks, keypoints and the + perturable_masks.""" + + @staticmethod + def flip_boxes(image, target): + width, _ = F.get_image_size(image) + target["boxes"][:, [0, 2]] = width - target["boxes"][:, [2, 0]] + return image, target + + @staticmethod + def flip_masks(image, target): + target["masks"] = target["masks"].flip(-1) + return image, target + + @staticmethod + def flip_keypoints(image, target): + width, _ = F.get_image_size(image) + keypoints = target["keypoints"] + keypoints = _flip_coco_person_keypoints(keypoints, width) + target["keypoints"] = keypoints + return image, target + + @staticmethod + def flip_perturable_masks(image, target): + target["masks"] = target["masks"].flip(-1) + return image, target + + @staticmethod + def flip_perturbable_mask(image, target): + target["perturbable_mask"] = target["perturbable_mask"].flip(-1) + return image, target + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if torch.rand(1) < self.p: + image = F.hflip(image) + if target is not None: + image, target = self.flip_boxes(image, target) + if "masks" in target: + image, target = self.flip_masks(image, target) + if "keypoints" in target: + image, target = self.flip_keypoints(image, target) + if "perturbable_mask" in target: + image, target = self.flip_perturable_masks(image, target) + return image, target + + +class ConvertCocoPolysToMask(ConvertCocoPolysToMask_, ExTransform): + pass From e0dc984b59d948c10cb786f351a4f0358a5f3183 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Tue, 14 May 2024 11:59:34 -0700 Subject: [PATCH 10/36] Comment --- mart/utils/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mart/utils/config.py b/mart/utils/config.py index 3a4268ad..578b4351 100644 --- a/mart/utils/config.py +++ b/mart/utils/config.py @@ -58,7 +58,7 @@ def __new__(cls, cfg_path): class CallbackInstantiator(Callback): - """Type checking for Lightning Callback.""" + """Satisfying type checking for Lightning Callback.""" def __new__(cls, cfg_path): obj = instantiate(cfg_path) From f4e9acc3a88d2bf08d69a3bc322f789ae80201ce Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Tue, 14 May 2024 22:22:31 -0700 Subject: [PATCH 11/36] Clean up. 
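This drops the image_01 and imagenet_normalized batch_c15n presets introduced in PATCH 01; the underlying transform configs under batch_c15n/transform/ are left in place. For reference, the transform pair the [0, 1] preset wired together behaves roughly as below. This is a minimal sketch using plain torch/torchvision calls, assuming mart.transforms.Denormalize(center=0, scale=255) amounts to multiplying by 255; the random input is only for illustration.

```python
import torch
from torchvision.transforms import Normalize

x = torch.rand(3, 8, 8)  # a [0, 1] image tensor

# "transform" (times_255_and_round.yaml): scale to [0, 255] and round via fake quantization,
# i.e. round(x * 255).clamp(0, 255) with scale=1 and zero_point=0.
x_255 = torch.fake_quantize_per_tensor_affine(
    x * 255, scale=1.0, zero_point=0, quant_min=0, quant_max=255
)

# "untransform" (divided_by_255.yaml): Normalize(mean=0, std=255) divides back into [0, 1].
x_01 = Normalize(mean=0, std=255)(x_255)

assert torch.allclose(x_01 * 255, (x * 255).round())
```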
--- mart/configs/batch_c15n/image_01.yaml | 6 ------ mart/configs/batch_c15n/imagenet_normalized.yaml | 6 ------ 2 files changed, 12 deletions(-) delete mode 100644 mart/configs/batch_c15n/image_01.yaml delete mode 100644 mart/configs/batch_c15n/imagenet_normalized.yaml diff --git a/mart/configs/batch_c15n/image_01.yaml b/mart/configs/batch_c15n/image_01.yaml deleted file mode 100644 index f3f8e758..00000000 --- a/mart/configs/batch_c15n/image_01.yaml +++ /dev/null @@ -1,6 +0,0 @@ -defaults: - - list - - transform: times_255_and_round - - transform@untransform: divided_by_255 - -input_key: 0 diff --git a/mart/configs/batch_c15n/imagenet_normalized.yaml b/mart/configs/batch_c15n/imagenet_normalized.yaml deleted file mode 100644 index e41fb3ff..00000000 --- a/mart/configs/batch_c15n/imagenet_normalized.yaml +++ /dev/null @@ -1,6 +0,0 @@ -defaults: - - dict - - transform: imagenet_to_255 - - transform@untransform: 255_to_imagenet - -input_key: image From b283bf1d126d09f1aaf5d16d7e81e07cdb6c4a63 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 09:37:10 -0700 Subject: [PATCH 12/36] Move to mart.nn.Get(). --- mart/nn/nn.py | 12 +++++++++++- mart/utils/utils.py | 11 ----------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/mart/nn/nn.py b/mart/nn/nn.py index 02113899..147a4773 100644 --- a/mart/nn/nn.py +++ b/mart/nn/nn.py @@ -13,7 +13,7 @@ import torch -__all__ = ["GroupNorm32", "SequentialDict", "ReturnKwargs", "CallWith", "Sum"] +__all__ = ["GroupNorm32", "SequentialDict", "ReturnKwargs", "CallWith", "Sum", "Get"] logger = logging.getLogger(__name__) @@ -300,3 +300,13 @@ def __init__(self): def forward(self, *args): return sum(args) + + +class Get: + """Get a value from the kwargs dictionary by key.""" + + def __init__(self, key): + self.key = key + + def __call__(self, **kwargs): + return kwargs[self.key] diff --git a/mart/utils/utils.py b/mart/utils/utils.py index dbf3fb21..f4a0a4ec 100644 --- a/mart/utils/utils.py +++ b/mart/utils/utils.py @@ -28,7 +28,6 @@ "save_file", "task_wrapper", "flatten_dict", - "Get", ] log = pylogger.get_pylogger(__name__) @@ -294,13 +293,3 @@ def get_dottedpath_items(d: dict, parent: Optional[str] = None): ret[key] = value return ret - - -class Get: - """Get a value from the kwargs dictionary by key.""" - - def __init__(self, key): - self.key = key - - def __call__(self, **kwargs): - return kwargs[self.key] From b96a99a2928ed40c1b90ab3b8de6530b737d6f30 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 10:06:58 -0700 Subject: [PATCH 13/36] Revert changes in transforms. 
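The object-detection transforms (ConvertInstanceSegmentationToPerturbable, RandomHorizontalFlip, ConvertCocoPolysToMask) move from mart/transforms/objdet.py back into extended.py under mart/transforms/vision/objdet/. For reference, the box update in RandomHorizontalFlip.flip_boxes mirrors [x1, y1, x2, y2] boxes around the vertical axis; a minimal sketch with a made-up image width and box:

```python
import torch

width = 100  # image width, as returned by F.get_image_size(image)
boxes = torch.tensor([[10.0, 20.0, 40.0, 80.0]])  # one box as [x1, y1, x2, y2]

# The new x1 comes from the old x2 and vice versa, so x1 <= x2 still holds after the flip.
flipped = boxes.clone()
flipped[:, [0, 2]] = width - boxes[:, [2, 0]]

print(flipped)  # tensor([[60., 20., 90., 80.]])
```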
--- mart/transforms/objdet.py | 91 ----------------------- mart/transforms/vision/objdet/extended.py | 74 ++++++++++++++++++ 2 files changed, 74 insertions(+), 91 deletions(-) delete mode 100644 mart/transforms/objdet.py diff --git a/mart/transforms/objdet.py b/mart/transforms/objdet.py deleted file mode 100644 index 68a7686f..00000000 --- a/mart/transforms/objdet.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright (C) 2022 Intel Corporation -# -# SPDX-License-Identifier: BSD-3-Clause -# - -import logging -from typing import Dict, Optional, Tuple - -import torch -from torch import Tensor -from torchvision.transforms import functional as F -from torchvision.transforms import transforms as T - -from .extended import ExTransform - -# FIXME: We really shouldn't be importing private functions -from .torchvision_ref import ConvertCocoPolysToMask as ConvertCocoPolysToMask_ -from .torchvision_ref import _flip_coco_person_keypoints - -logger = logging.getLogger(__name__) - -__all__ = [ - "ConvertInstanceSegmentationToPerturbable", - "RandomHorizontalFlip", - "ConvertCocoPolysToMask", -] - - -class ConvertInstanceSegmentationToPerturbable(ExTransform): - """Merge all instance masks and reverse.""" - - def __call__(self, image, target): - perturbable_mask = torch.sum(target["masks"], dim=0) == 0 - # Convert to float to be differentiable. - target["perturbable_mask"] = perturbable_mask.float() - - return image, target - - -class RandomHorizontalFlip(T.RandomHorizontalFlip, ExTransform): - """Flip the image and annotations including boxes, masks, keypoints and the - perturable_masks.""" - - @staticmethod - def flip_boxes(image, target): - width, _ = F.get_image_size(image) - target["boxes"][:, [0, 2]] = width - target["boxes"][:, [2, 0]] - return image, target - - @staticmethod - def flip_masks(image, target): - target["masks"] = target["masks"].flip(-1) - return image, target - - @staticmethod - def flip_keypoints(image, target): - width, _ = F.get_image_size(image) - keypoints = target["keypoints"] - keypoints = _flip_coco_person_keypoints(keypoints, width) - target["keypoints"] = keypoints - return image, target - - @staticmethod - def flip_perturable_masks(image, target): - target["masks"] = target["masks"].flip(-1) - return image, target - - @staticmethod - def flip_perturbable_mask(image, target): - target["perturbable_mask"] = target["perturbable_mask"].flip(-1) - return image, target - - def forward( - self, image: Tensor, target: Optional[Dict[str, Tensor]] = None - ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: - if torch.rand(1) < self.p: - image = F.hflip(image) - if target is not None: - image, target = self.flip_boxes(image, target) - if "masks" in target: - image, target = self.flip_masks(image, target) - if "keypoints" in target: - image, target = self.flip_keypoints(image, target) - if "perturbable_mask" in target: - image, target = self.flip_perturable_masks(image, target) - return image, target - - -class ConvertCocoPolysToMask(ConvertCocoPolysToMask_, ExTransform): - pass diff --git a/mart/transforms/vision/objdet/extended.py b/mart/transforms/vision/objdet/extended.py index 41595383..ba827865 100644 --- a/mart/transforms/vision/objdet/extended.py +++ b/mart/transforms/vision/objdet/extended.py @@ -6,12 +6,19 @@ import logging import os +from typing import Dict, Optional, Tuple import numpy as np import torch from PIL import Image, ImageOps +from torch import Tensor +from torchvision.transforms import functional as F from torchvision.transforms import transforms as T +# FIXME: We 
really shouldn't be importing private functions +from .torchvision_ref import ConvertCocoPolysToMask as ConvertCocoPolysToMask_ +from .torchvision_ref import _flip_coco_person_keypoints + logger = logging.getLogger(__name__) __all__ = [ @@ -21,6 +28,9 @@ "SplitLambda", "LoadPerturbableMask", "LoadCoords", + "ConvertInstanceSegmentationToPerturbable", + "RandomHorizontalFlip", + "ConvertCocoPolysToMask", ] @@ -103,6 +113,17 @@ def __call__(self, tensor, **kwargs): return tensor +class ConvertInstanceSegmentationToPerturbable(ExTransform): + """Merge all instance masks and reverse.""" + + def __call__(self, image, target): + perturbable_mask = torch.sum(target["masks"], dim=0) == 0 + # Convert to float to be differentiable. + target["perturbable_mask"] = perturbable_mask.float() + + return image, target + + class LoadPerturbableMask(ExTransform): """Load perturbable masks and add to target.""" @@ -137,3 +158,56 @@ def __call__(self, image, target): # Convert to float to be differentiable. target["coords"] = coords return image, target + + +class RandomHorizontalFlip(T.RandomHorizontalFlip, ExTransform): + """Flip the image and annotations including boxes, masks, keypoints and the + perturable_masks.""" + + @staticmethod + def flip_boxes(image, target): + width, _ = F.get_image_size(image) + target["boxes"][:, [0, 2]] = width - target["boxes"][:, [2, 0]] + return image, target + + @staticmethod + def flip_masks(image, target): + target["masks"] = target["masks"].flip(-1) + return image, target + + @staticmethod + def flip_keypoints(image, target): + width, _ = F.get_image_size(image) + keypoints = target["keypoints"] + keypoints = _flip_coco_person_keypoints(keypoints, width) + target["keypoints"] = keypoints + return image, target + + @staticmethod + def flip_perturable_masks(image, target): + target["masks"] = target["masks"].flip(-1) + return image, target + + @staticmethod + def flip_perturbable_mask(image, target): + target["perturbable_mask"] = target["perturbable_mask"].flip(-1) + return image, target + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if torch.rand(1) < self.p: + image = F.hflip(image) + if target is not None: + image, target = self.flip_boxes(image, target) + if "masks" in target: + image, target = self.flip_masks(image, target) + if "keypoints" in target: + image, target = self.flip_keypoints(image, target) + if "perturbable_mask" in target: + image, target = self.flip_perturable_masks(image, target) + return image, target + + +class ConvertCocoPolysToMask(ConvertCocoPolysToMask_, ExTransform): + pass From 89eea7718342b13a76b24b2db9cca3a82c198fb0 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 10:07:22 -0700 Subject: [PATCH 14/36] Revert changes in __init__.py --- mart/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/mart/__init__.py b/mart/__init__.py index 2c02b644..85181105 100644 --- a/mart/__init__.py +++ b/mart/__init__.py @@ -1,3 +1,11 @@ -import importlib.metadata +import importlib + +from mart import attack as attack +from mart import datamodules as datamodules +from mart import models as models +from mart import nn as nn +from mart import optim as optim +from mart import transforms as transforms +from mart import utils as utils __version__ = importlib.metadata.version(__package__ or __name__) From 9491cc43cd96454ac87786a9da86882a37537dc9 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 10:08:20 -0700 
Subject: [PATCH 15/36] Revert changes in pyproject.toml. --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 47ebfbce..3e614550 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,7 +74,6 @@ full = [ ] extras = [ - "rich ~= 12.6.0", # beautiful text formatting in terminal ] [tool.setuptools] From ec63b73403c78fe752c947b337bf07ecdeabe5b2 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 10:33:41 -0700 Subject: [PATCH 16/36] Skip object detection tests if pycocotools is not installed. --- tests/conftest.py | 7 ++++++- tests/test_experiments.py | 17 +++++++++++++---- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 74ce8433..f5745d66 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,7 @@ from hydra.core.global_hydra import GlobalHydra from omegaconf import DictConfig, open_dict -from mart.utils.imports import _HAS_TIMM, _HAS_TORCHVISION +from mart.utils.imports import _HAS_PYCOCOTOOLS, _HAS_TIMM, _HAS_TORCHVISION root = Path(os.getcwd()) pyrootutils.set_root(path=root, dotenv=True, pythonpath=True) @@ -22,6 +22,9 @@ experiments_require_torchvision = [ "CIFAR10_CNN", "CIFAR10_CNN_Adv", +] + +experiments_require_torchvision_pycocotools = [ "COCO_TorchvisionFasterRCNN", "COCO_TorchvisionFasterRCNN_Adv", "COCO_TorchvisionRetinaNet", @@ -35,6 +38,8 @@ experiments_names = [] if _HAS_TORCHVISION: experiments_names += experiments_require_torchvision +if _HAS_TORCHVISION and _HAS_PYCOCOTOOLS: + experiments_names += experiments_require_torchvision_pycocotools if _HAS_TIMM and _HAS_TORCHVISION: experiments_names += experiments_require_torchvision_and_timm diff --git a/tests/test_experiments.py b/tests/test_experiments.py index e1f1ff01..637660ea 100644 --- a/tests/test_experiments.py +++ b/tests/test_experiments.py @@ -4,7 +4,7 @@ import pytest from hydra.core.global_hydra import GlobalHydra -from mart.utils.imports import _HAS_TIMM, _HAS_TORCHVISION +from mart.utils.imports import _HAS_PYCOCOTOOLS, _HAS_TIMM, _HAS_TORCHVISION from tests.helpers.dataset_generator import FakeCOCODataset from tests.helpers.run_if import RunIf from tests.helpers.run_sh_command import run_sh_command @@ -131,7 +131,10 @@ def test_imagenet_timm_experiment(classification_cfg, tmp_path): @RunIf(sh=True) @pytest.mark.slow -@pytest.mark.skipif(not _HAS_TORCHVISION, reason="test requires that torchvision is installed") +@pytest.mark.skipif( + not _HAS_TORCHVISION or not _HAS_PYCOCOTOOLS, + reason="test requires that torchvision and pycocotools are installed", +) def test_coco_fasterrcnn_experiment(coco_cfg, tmp_path): """Test TorchVision FasterRCNN experiment.""" overrides = coco_cfg["trainer"] + coco_cfg["datamodel"] @@ -147,7 +150,10 @@ def test_coco_fasterrcnn_experiment(coco_cfg, tmp_path): @RunIf(sh=True) @pytest.mark.slow -@pytest.mark.skipif(not _HAS_TORCHVISION, reason="test requires that torchvision is installed") +@pytest.mark.skipif( + not _HAS_TORCHVISION or not _HAS_PYCOCOTOOLS, + reason="test requires that torchvision and pycocotools are installed", +) def test_coco_fasterrcnn_adv_experiment(coco_cfg, tmp_path): """Test TorchVision FasterRCNN Adv experiment.""" overrides = coco_cfg["trainer"] + coco_cfg["datamodel"] @@ -163,7 +169,10 @@ def test_coco_fasterrcnn_adv_experiment(coco_cfg, tmp_path): @RunIf(sh=True) @pytest.mark.slow -@pytest.mark.skipif(not _HAS_TORCHVISION, reason="test requires that torchvision is installed") +@pytest.mark.skipif( + not _HAS_TORCHVISION or 
not _HAS_PYCOCOTOOLS, + reason="test requires that torchvision and pycocotools installed", +) def test_coco_retinanet_experiment(coco_cfg, tmp_path): """Test TorchVision RetinaNet experiment.""" overrides = coco_cfg["trainer"] + coco_cfg["datamodel"] From e063944052f8dded87b6c85d78a62eefbbe20e93 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 11:25:32 -0700 Subject: [PATCH 17/36] Simplify dependency with newer lightning 2.1+. --- pyproject.toml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3e614550..378b2a57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,8 +36,11 @@ core = [ "lightning[extra] ~= 2.1.4", # Full functionality including TensorboardX. "torchmetrics == 1.0.1", # TODO: Remove pydantic and numpy constraints with newer lightning. - "pydantic == 1.10.14", # https://github.com/Lightning-AI/lightning/pull/18022/files - "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 + # Fixed in Lightning 2.1.0+: https://github.com/Lightning-AI/pytorch-lightning/commit/df959aeb4fd454d515a74b0553ad7018b73d1f15 + # "pydantic == 1.10.14", # https://github.com/Lightning-AI/lightning/pull/18022/files + # Fixed in PyTorch 2.0+: https://github.com/pytorch/pytorch/pull/96452 + # "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 + # numpy 1.26.4 ] vision = [ @@ -46,7 +49,8 @@ vision = [ ] objdet = [ - "pycocotools ~= 2.0.5", # data format for object detection. + # pycocotools<2.0.6 use np.float that is deprecated since numpy 1.20.0 with a warning, later with an error. + "pycocotools ~= 2.0.6", # data format for object detection. "fiftyone ~= 0.21.4", # visualization for object detection ] From 63286d016077cffb7525cc4186bf9c1f757b914b Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 11:26:11 -0700 Subject: [PATCH 18/36] Clean up comments. --- pyproject.toml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 378b2a57..25153293 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,12 +35,6 @@ core = [ "torch >= 2.0.1", "lightning[extra] ~= 2.1.4", # Full functionality including TensorboardX. "torchmetrics == 1.0.1", - # TODO: Remove pydantic and numpy constraints with newer lightning. - # Fixed in Lightning 2.1.0+: https://github.com/Lightning-AI/pytorch-lightning/commit/df959aeb4fd454d515a74b0553ad7018b73d1f15 - # "pydantic == 1.10.14", # https://github.com/Lightning-AI/lightning/pull/18022/files - # Fixed in PyTorch 2.0+: https://github.com/pytorch/pytorch/pull/96452 - # "numpy == 1.23.5", # https://github.com/pytorch/pytorch/issues/91516 - # numpy 1.26.4 ] vision = [ @@ -49,7 +43,6 @@ vision = [ ] objdet = [ - # pycocotools<2.0.6 use np.float that is deprecated since numpy 1.20.0 with a warning, later with an error. "pycocotools ~= 2.0.6", # data format for object detection. "fiftyone ~= 0.21.4", # visualization for object detection ] From d725ea3e9b36ed82025fc03dd9bb5aa93b81b363 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 12:08:10 -0700 Subject: [PATCH 19/36] Remove the version constraint on rich, due to the conflict with Anomalib. 
--- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 25153293..a9b5626b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ dependencies = [ # --------- basics --------- # "pyrootutils ~= 1.0.4", # standardizing the project root setup - "rich ~= 12.6.0", # beautiful text formatting in terminal + "rich", # beautiful text formatting in terminal "fire == 0.5.0", ] From 8d19d9bbd472707cdaead80924e623d03613ac90 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 12:13:19 -0700 Subject: [PATCH 20/36] Make a package anomalib_adversary --- examples/anomalib_adversary/.gitignore | 2 ++ examples/anomalib_adversary/requirements.txt | 2 ++ examples/anomalib_adversary/setup.py | 8 ++++++++ 3 files changed, 12 insertions(+) create mode 100644 examples/anomalib_adversary/.gitignore create mode 100644 examples/anomalib_adversary/requirements.txt create mode 100644 examples/anomalib_adversary/setup.py diff --git a/examples/anomalib_adversary/.gitignore b/examples/anomalib_adversary/.gitignore new file mode 100644 index 00000000..3747c7e1 --- /dev/null +++ b/examples/anomalib_adversary/.gitignore @@ -0,0 +1,2 @@ +results +datasets diff --git a/examples/anomalib_adversary/requirements.txt b/examples/anomalib_adversary/requirements.txt new file mode 100644 index 00000000..ae34b0f9 --- /dev/null +++ b/examples/anomalib_adversary/requirements.txt @@ -0,0 +1,2 @@ +anomalib[full] @ git+https://github.com/openvinotoolkit/anomalib.git@v1.0.1 +mart @ git+https://github.com/IntelLabs/MART@mzweilin/upgrade_lightning_2.1.4 diff --git a/examples/anomalib_adversary/setup.py b/examples/anomalib_adversary/setup.py new file mode 100644 index 00000000..d9986611 --- /dev/null +++ b/examples/anomalib_adversary/setup.py @@ -0,0 +1,8 @@ +import setuptools + +setuptools.setup( + name="anomalib_adversary", + version="0.1.0a", + description="Evaluating robustness of anomaly detection models in Anomalib.", + packages=["anomalib_adversary"], +) From 94d4d4485a9ddd89a2bbc6b31f4ee9fe0a39e750 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 12:14:02 -0700 Subject: [PATCH 21/36] Add two model configs for Anomalib. 
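stfpm_mart.yaml injects the MART adversary into Anomalib's trainer through mart.utils.CallbackInstantiator (added earlier in mart/utils/config.py), which loads an OmegaConf/Hydra config file and returns the instantiated Lightning Callback, raising if the config resolves to anything else. A minimal sketch of what the class_path entry resolves to, assuming MART is installed and ./anomalib_fgsm_linf_10.yaml (the attack config referenced by stfpm_mart.yaml) exists:

```python
import hydra
from omegaconf import OmegaConf

from mart.utils import CallbackInstantiator

# What `class_path: mart.utils.CallbackInstantiator` with
# `cfg_path: ./anomalib_fgsm_linf_10.yaml` builds:
adversary_callback = CallbackInstantiator(cfg_path="./anomalib_fgsm_linf_10.yaml")

# Roughly equivalent to instantiating the config by hand:
cfg = OmegaConf.load("./anomalib_fgsm_linf_10.yaml")
adversary_callback = hydra.utils.instantiate(cfg)
```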
--- .../configs/anomalib/stfpm.yaml | 21 ++++++++++++++++ .../configs/anomalib/stfpm_mart.yaml | 24 +++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 examples/anomalib_adversary/configs/anomalib/stfpm.yaml create mode 100644 examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml diff --git a/examples/anomalib_adversary/configs/anomalib/stfpm.yaml b/examples/anomalib_adversary/configs/anomalib/stfpm.yaml new file mode 100644 index 00000000..c5e783ba --- /dev/null +++ b/examples/anomalib_adversary/configs/anomalib/stfpm.yaml @@ -0,0 +1,21 @@ +model: + class_path: anomalib.models.Stfpm + init_args: + backbone: resnet18 + layers: + - layer1 + - layer2 + - layer3 + +metrics: + pixel: + - AUROC + +trainer: + max_epochs: 100 + callbacks: + - class_path: lightning.pytorch.callbacks.EarlyStopping + init_args: + patience: 5 + monitor: pixel_AUROC + mode: max diff --git a/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml b/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml new file mode 100644 index 00000000..f83b1494 --- /dev/null +++ b/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml @@ -0,0 +1,24 @@ +model: + class_path: anomalib.models.Stfpm + init_args: + backbone: resnet18 + layers: + - layer1 + - layer2 + - layer3 + +metrics: + pixel: + - AUROC + +trainer: + max_epochs: 100 + callbacks: + - class_path: lightning.pytorch.callbacks.EarlyStopping + init_args: + patience: 5 + monitor: pixel_AUROC + mode: max + - class_path: mart.utils.CallbackInstantiator + init_args: + cfg_path: ./anomalib_fgsm_linf_10.yaml From b85aaf1c3a09e5f9315da8213aa9f8b0a1874acb Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 12:14:37 -0700 Subject: [PATCH 22/36] Add README. --- examples/anomalib_adversary/README.md | 88 +++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 examples/anomalib_adversary/README.md diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md new file mode 100644 index 00000000..f826c225 --- /dev/null +++ b/examples/anomalib_adversary/README.md @@ -0,0 +1,88 @@ +# Adversarial Robustness of Anomaly Detection + +This project demonstrates how to generate adversarial examples against anomaly detection models in [Anomalib](https://github.com/openvinotoolkit/anomalib). + +## Installation + +Anomalib requires Python 3.10+. + +```sh +pip install -r requirements.txt +``` + +## Experiment + +0. \[Optional\] Soft link the existing datasets folder from Anomalib if you have downloaded datasets before with Anomalib. + +```sh +ln -s {PATH_TO_ANOMALIB_REPO}/datasets . +``` + +1. Train a model. The config file [configs/anomalib/stfpm.yaml](configs/anomalib/stfpm.yaml) adds an EarlyStopping Callback with maximal 100 epochs. + +```sh +CUDA_VISIBLE_DEVICES=0 anomalib train \ +--data anomalib.data.MVTec \ +--data.category transistor \ +--config configs/anomalib/stfpm.yaml +``` + +2. Evaluate the trained model without adversary as baseline. 
+ +```sh +CUDA_VISIBLE_DEVICES=0 anomalib test \ +--data anomalib.data.MVTec \ +--data.category transistor \ +--config configs/anomalib/stfpm.yaml \ +--ckpt_path=results/Stfpm/MVTec/transistor/latest/weights/lightning/model.ckpt +``` + +```console +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ Test metric ┃ DataLoader 0 ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ image_AUROC │ 0.8733333349227905 │ +│ image_F1Score │ 0.7945205569267273 │ +│ pixel_AUROC │ 0.7860202789306641 │ +└───────────────────────────┴───────────────────────────┘ +``` + +2. Generate an adversary config from MART. + +```sh +python -m mart.generate_config \ +--config_dir="configs" \ +--export_node=callbacks.adversary_connector \ +--resolve=True \ +callbacks=adversary_connector \ +batch_c15n@callbacks.adversary_connector.batch_c15n=dict_imagenet_normalized \ +callbacks.adversary_connector.adversary=$\{attack\} \ ++attack=classification_fgsm_linf \ +~attack.gain \ ++attack.gain._target_=mart.nn.Get \ ++attack.gain.key=loss \ +attack.objective=null \ +attack.eps=10 \ +attack.callbacks.progress_bar.enable=true \ +> anomalib_fgsm_linf_10.yaml +``` + +3. Run attack. The config file [configs/anomalib/stfpm_mart.yaml](configs/anomalib/stfpm_mart.yaml) adds a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). + +```sh +CUDA_VISIBLE_DEVICES=0 anomalib test \ +--data anomalib.data.MVTec \ +--data.category transistor \ +--config configs/anomalib/stfpm_mart.yaml \ +--ckpt_path=results/Stfpm/MVTec/transistor/latest/weights/lightning/model.ckpt +``` + +```console +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ Test metric ┃ DataLoader 0 ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ image_AUROC │ 0.5979167222976685 │ +│ image_F1Score │ 0.5714285969734192 │ +│ pixel_AUROC │ 0.6867808699607849 │ +└───────────────────────────┴───────────────────────────┘ +``` From 3c0f172821de0b35be0930775e2874a12638a0c3 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 15 May 2024 12:19:00 -0700 Subject: [PATCH 23/36] Update README --- examples/anomalib_adversary/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md index f826c225..732dd2b4 100644 --- a/examples/anomalib_adversary/README.md +++ b/examples/anomalib_adversary/README.md @@ -47,7 +47,7 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \ └───────────────────────────┴───────────────────────────┘ ``` -2. Generate an adversary config from MART. +3. Generate an adversary config from MART. ```sh python -m mart.generate_config \ @@ -67,7 +67,7 @@ attack.callbacks.progress_bar.enable=true \ > anomalib_fgsm_linf_10.yaml ``` -3. Run attack. The config file [configs/anomalib/stfpm_mart.yaml](configs/anomalib/stfpm_mart.yaml) adds a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). +4. Run attack. The config file [configs/anomalib/stfpm_mart.yaml](configs/anomalib/stfpm_mart.yaml) adds a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). ```sh CUDA_VISIBLE_DEVICES=0 anomalib test \ From 101940d4e04002eaf344fcf61de7e60da663964d Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 16 May 2024 08:45:15 -0700 Subject: [PATCH 24/36] Replace Anomalib config files with command lines. 
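The EarlyStopping settings that previously lived in configs/anomalib/stfpm.yaml are now passed as --trainer.callbacks* flags on the command line. Roughly, the flags in the train command construct the same Lightning callback as before:

```python
from lightning.pytorch.callbacks import EarlyStopping

early_stopping = EarlyStopping(monitor="pixel_AUROC", mode="max", patience=5)
```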
--- examples/anomalib_adversary/README.md | 22 +++++++++++------ .../configs/anomalib/stfpm.yaml | 21 ---------------- .../configs/anomalib/stfpm_mart.yaml | 24 ------------------- 3 files changed, 15 insertions(+), 52 deletions(-) delete mode 100644 examples/anomalib_adversary/configs/anomalib/stfpm.yaml delete mode 100644 examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md index 732dd2b4..3bcb6e28 100644 --- a/examples/anomalib_adversary/README.md +++ b/examples/anomalib_adversary/README.md @@ -18,13 +18,17 @@ pip install -r requirements.txt ln -s {PATH_TO_ANOMALIB_REPO}/datasets . ``` -1. Train a model. The config file [configs/anomalib/stfpm.yaml](configs/anomalib/stfpm.yaml) adds an EarlyStopping Callback with maximal 100 epochs. +1. Train a model. We add an EarlyStopping callback in command line. ```sh CUDA_VISIBLE_DEVICES=0 anomalib train \ --data anomalib.data.MVTec \ --data.category transistor \ ---config configs/anomalib/stfpm.yaml +--model Stfpm \ +--trainer.callbacks lightning.pytorch.callbacks.EarlyStopping \ +--trainer.callbacks.patience 5 \ +--trainer.callbacks.monitor pixel_AUROC \ +--trainer.callbacks.mode max ``` 2. Evaluate the trained model without adversary as baseline. @@ -33,7 +37,7 @@ CUDA_VISIBLE_DEVICES=0 anomalib train \ CUDA_VISIBLE_DEVICES=0 anomalib test \ --data anomalib.data.MVTec \ --data.category transistor \ ---config configs/anomalib/stfpm.yaml \ +--model Stfpm \ --ckpt_path=results/Stfpm/MVTec/transistor/latest/weights/lightning/model.ckpt ``` @@ -44,10 +48,11 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \ │ image_AUROC │ 0.8733333349227905 │ │ image_F1Score │ 0.7945205569267273 │ │ pixel_AUROC │ 0.7860202789306641 │ +│ pixel_F1Score │ 0.46384868025779724 │ └───────────────────────────┴───────────────────────────┘ ``` -3. Generate an adversary config from MART. +3. Generate an adversary config file from MART. ```sh python -m mart.generate_config \ @@ -67,13 +72,15 @@ attack.callbacks.progress_bar.enable=true \ > anomalib_fgsm_linf_10.yaml ``` -4. Run attack. The config file [configs/anomalib/stfpm_mart.yaml](configs/anomalib/stfpm_mart.yaml) adds a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). +4. Run attack. We add a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). 
```sh CUDA_VISIBLE_DEVICES=0 anomalib test \ --data anomalib.data.MVTec \ --data.category transistor \ ---config configs/anomalib/stfpm_mart.yaml \ +--model Stfpm \ +--trainer.callbacks mart.utils.CallbackInstantiator \ +--trainer.callbacks.cfg_path ./anomalib_fgsm_linf_10.yaml \ --ckpt_path=results/Stfpm/MVTec/transistor/latest/weights/lightning/model.ckpt ``` @@ -83,6 +90,7 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \ ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ │ image_AUROC │ 0.5979167222976685 │ │ image_F1Score │ 0.5714285969734192 │ -│ pixel_AUROC │ 0.6867808699607849 │ +│ pixel_AUROC │ 0.686780571937561 │ +│ pixel_F1Score │ 0.0955422893166542 │ └───────────────────────────┴───────────────────────────┘ ``` diff --git a/examples/anomalib_adversary/configs/anomalib/stfpm.yaml b/examples/anomalib_adversary/configs/anomalib/stfpm.yaml deleted file mode 100644 index c5e783ba..00000000 --- a/examples/anomalib_adversary/configs/anomalib/stfpm.yaml +++ /dev/null @@ -1,21 +0,0 @@ -model: - class_path: anomalib.models.Stfpm - init_args: - backbone: resnet18 - layers: - - layer1 - - layer2 - - layer3 - -metrics: - pixel: - - AUROC - -trainer: - max_epochs: 100 - callbacks: - - class_path: lightning.pytorch.callbacks.EarlyStopping - init_args: - patience: 5 - monitor: pixel_AUROC - mode: max diff --git a/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml b/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml deleted file mode 100644 index f83b1494..00000000 --- a/examples/anomalib_adversary/configs/anomalib/stfpm_mart.yaml +++ /dev/null @@ -1,24 +0,0 @@ -model: - class_path: anomalib.models.Stfpm - init_args: - backbone: resnet18 - layers: - - layer1 - - layer2 - - layer3 - -metrics: - pixel: - - AUROC - -trainer: - max_epochs: 100 - callbacks: - - class_path: lightning.pytorch.callbacks.EarlyStopping - init_args: - patience: 5 - monitor: pixel_AUROC - mode: max - - class_path: mart.utils.CallbackInstantiator - init_args: - cfg_path: ./anomalib_fgsm_linf_10.yaml From ed21be6ced76065edc10351a40efc9682f3c3116 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 16 May 2024 21:37:51 -0700 Subject: [PATCH 25/36] Add an empty configs folder, because mart.generate_config requires a local configs folder. --- examples/anomalib_adversary/configs/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 examples/anomalib_adversary/configs/.gitkeep diff --git a/examples/anomalib_adversary/configs/.gitkeep b/examples/anomalib_adversary/configs/.gitkeep new file mode 100644 index 00000000..e69de29b From f74c293a938f0c785bb62dbba410e53efd4b35ed Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Thu, 16 May 2024 21:41:47 -0700 Subject: [PATCH 26/36] Point dependency to MART@main. 
--- examples/anomalib_adversary/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/anomalib_adversary/requirements.txt b/examples/anomalib_adversary/requirements.txt index ae34b0f9..d7e474c8 100644 --- a/examples/anomalib_adversary/requirements.txt +++ b/examples/anomalib_adversary/requirements.txt @@ -1,2 +1,2 @@ anomalib[full] @ git+https://github.com/openvinotoolkit/anomalib.git@v1.0.1 -mart @ git+https://github.com/IntelLabs/MART@mzweilin/upgrade_lightning_2.1.4 +mart @ git+https://github.com/IntelLabs/MART@main From 061b50288a796902fe9e6041c9385caafbfe6fc9 Mon Sep 17 00:00:00 2001 From: Cory Cornelius Date: Fri, 17 May 2024 18:26:52 -0700 Subject: [PATCH 27/36] Parameterize module step function in AdversaryConnector --- mart/callbacks/adversary_connector.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mart/callbacks/adversary_connector.py b/mart/callbacks/adversary_connector.py index 006fbe1f..f5a95d13 100644 --- a/mart/callbacks/adversary_connector.py +++ b/mart/callbacks/adversary_connector.py @@ -62,6 +62,7 @@ def __init__( val_adversary: Callable = None, test_adversary: Callable = None, batch_c15n: Callable = None, + module_step_fn: str = "training_step", ): """A pl.Trainer callback which perturbs input to be adversarial in training/validation/test phase. @@ -77,6 +78,7 @@ def __init__( self.val_adversary = val_adversary or adversary self.test_adversary = test_adversary or adversary self.batch_c15n = batch_c15n + self.module_step_fn = module_step_fn def setup(self, trainer, pl_module, stage=None): self._on_after_batch_transfer = pl_module.on_after_batch_transfer @@ -123,7 +125,8 @@ def model(input, target): pl_module, excludes=["torch.nn.modules.dropout", "torch.nn.modules.batchnorm"], ): - outputs = pl_module.training_step(batch, dataloader_idx) + pl_module_step_fn = getattr(pl_module, self.module_step_fn) + outputs = pl_module_step_fn(batch, dataloader_idx) return outputs # Canonicalize the batch to work with Adversary. From ceb7ccbcdbb9bda76570868250c42b6293a42fdf Mon Sep 17 00:00:00 2001 From: Cory Cornelius Date: Fri, 17 May 2024 18:27:22 -0700 Subject: [PATCH 28/36] HACK: Gain function that clones it target --- mart/attack/gain.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mart/attack/gain.py b/mart/attack/gain.py index 93317dd7..9169fa8a 100644 --- a/mart/attack/gain.py +++ b/mart/attack/gain.py @@ -84,3 +84,10 @@ def forward(self, rpn_objectness: torch.Tensor) -> torch.Tensor: else: # Encourage foreground. return probs + + +from torch.nn import BCELoss as BCELoss_ +class BCELoss(BCELoss_): + def forward(self, input, target): + # We clone target because that tensor can be made in inference mode. 
+ return super().forward(input, target.clone()) From 310186e98a301cf47fcc9c7b5f127ca2be58adcc Mon Sep 17 00:00:00 2001 From: Cory Cornelius Date: Fri, 17 May 2024 18:29:20 -0700 Subject: [PATCH 29/36] Update README with WinClip example --- examples/anomalib_adversary/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md index 3bcb6e28..17952953 100644 --- a/examples/anomalib_adversary/README.md +++ b/examples/anomalib_adversary/README.md @@ -52,6 +52,10 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \ └───────────────────────────┴───────────────────────────┘ ``` +```sh +anomalib test --data anomalib.data.MVTec --data.category hazelnut --model WinClip --data.init_args.image_size 240 --data.init_args.eval_batch_size 16 "--metrics.pixel=[F1Score,AUROC]" +``` + 3. Generate an adversary config file from MART. ```sh @@ -72,6 +76,10 @@ attack.callbacks.progress_bar.enable=true \ > anomalib_fgsm_linf_10.yaml ``` +```sh +python -m mart.generate_config --config_dir="configs" --export_node=callbacks.adversary_connector --resolve=True callbacks=adversary_connector batch_c15n@callbacks.adversary_connector.batch_c15n=dict_imagenet_normalized callbacks.adversary_connector.adversary=$\{attack\} +attack=classification_fgsm_linf attack.objective=null attack.eps=10 attack.callbacks.progress_bar.enable=true +callbacks.adversary_connector.module_step_fn=validation_step attack.gain._call_with_args_.0=anomaly_maps attack.gain._call_with_args_.1=mask > anomalib_fgsm_linf_10.yaml +``` + 4. Run attack. We add a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml). ```sh @@ -94,3 +102,7 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \ │ pixel_F1Score │ 0.0955422893166542 │ └───────────────────────────┴───────────────────────────┘ ``` + +```sh +anomalib test --data anomalib.data.MVTec --data.category hazelnut --model WinClip --data.init_args.image_size 240 --data.init_args.eval_batch_size 16 "--metrics.pixel=[F1Score,AUROC]" --trainer.callbacks mart.utils.CallbackInstantiator --trainer.callbacks.cfg_path ./anomalib_fgsm_linf_10.yaml +``` From 187ac53c2e937aac902ee056f95cbb8729227474 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 5 Jun 2024 10:15:47 -0700 Subject: [PATCH 30/36] Update MART dependency. --- examples/anomalib_adversary/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/anomalib_adversary/requirements.txt b/examples/anomalib_adversary/requirements.txt index d7e474c8..2fe868c7 100644 --- a/examples/anomalib_adversary/requirements.txt +++ b/examples/anomalib_adversary/requirements.txt @@ -1,2 +1,2 @@ anomalib[full] @ git+https://github.com/openvinotoolkit/anomalib.git@v1.0.1 -mart @ git+https://github.com/IntelLabs/MART@main +mart @ git+https://github.com/IntelLabs/MART@v0.6.1 From e0117ae8a07d242267e53cb443ff3abcf5b48c69 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 5 Jun 2024 10:40:41 -0700 Subject: [PATCH 31/36] Mordenize packaging with pyproject.toml. 
--- examples/anomalib_adversary/pyproject.toml | 15 +++++++++++++++ examples/anomalib_adversary/requirements.txt | 2 -- examples/anomalib_adversary/setup.py | 8 -------- 3 files changed, 15 insertions(+), 10 deletions(-) create mode 100644 examples/anomalib_adversary/pyproject.toml delete mode 100644 examples/anomalib_adversary/requirements.txt delete mode 100644 examples/anomalib_adversary/setup.py diff --git a/examples/anomalib_adversary/pyproject.toml b/examples/anomalib_adversary/pyproject.toml new file mode 100644 index 00000000..8caea066 --- /dev/null +++ b/examples/anomalib_adversary/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "anomalib_adversary" +version = "0.1.0a" +description = "Evaluating robustness of anomaly detection models in Anomalib." +authors = [ + {name = "Intel Corporation"} +] + +dependencies = [ + "anomalib[full]==1.0.1", + "mart @ https://github.com/IntelLabs/MART/archive/refs/tags/v0.6.1.zip" +] + +[tool.setuptools.packages.find] +include = ["anomalib_adversary*"] diff --git a/examples/anomalib_adversary/requirements.txt b/examples/anomalib_adversary/requirements.txt deleted file mode 100644 index 2fe868c7..00000000 --- a/examples/anomalib_adversary/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -anomalib[full] @ git+https://github.com/openvinotoolkit/anomalib.git@v1.0.1 -mart @ git+https://github.com/IntelLabs/MART@v0.6.1 diff --git a/examples/anomalib_adversary/setup.py b/examples/anomalib_adversary/setup.py deleted file mode 100644 index d9986611..00000000 --- a/examples/anomalib_adversary/setup.py +++ /dev/null @@ -1,8 +0,0 @@ -import setuptools - -setuptools.setup( - name="anomalib_adversary", - version="0.1.0a", - description="Evaluating robustness of anomaly detection models in Anomalib.", - packages=["anomalib_adversary"], -) From 3b016d0a60f8c16da77a7b7bc6b8531fe664ab40 Mon Sep 17 00:00:00 2001 From: Weilin Xu Date: Wed, 5 Jun 2024 11:36:29 -0700 Subject: [PATCH 32/36] Revert "Parameterize module step function in AdversaryConnector" This reverts commit 061b50288a796902fe9e6041c9385caafbfe6fc9. --- mart/callbacks/adversary_connector.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mart/callbacks/adversary_connector.py b/mart/callbacks/adversary_connector.py index f5a95d13..006fbe1f 100644 --- a/mart/callbacks/adversary_connector.py +++ b/mart/callbacks/adversary_connector.py @@ -62,7 +62,6 @@ def __init__( val_adversary: Callable = None, test_adversary: Callable = None, batch_c15n: Callable = None, - module_step_fn: str = "training_step", ): """A pl.Trainer callback which perturbs input to be adversarial in training/validation/test phase. @@ -78,7 +77,6 @@ def __init__( self.val_adversary = val_adversary or adversary self.test_adversary = test_adversary or adversary self.batch_c15n = batch_c15n - self.module_step_fn = module_step_fn def setup(self, trainer, pl_module, stage=None): self._on_after_batch_transfer = pl_module.on_after_batch_transfer @@ -125,8 +123,7 @@ def model(input, target): pl_module, excludes=["torch.nn.modules.dropout", "torch.nn.modules.batchnorm"], ): - pl_module_step_fn = getattr(pl_module, self.module_step_fn) - outputs = pl_module_step_fn(batch, dataloader_idx) + outputs = pl_module.training_step(batch, dataloader_idx) return outputs # Canonicalize the batch to work with Adversary. 
From 4fb06906ecf4d639b4d6e82030727b5beb4da604 Mon Sep 17 00:00:00 2001
From: Weilin Xu
Date: Wed, 5 Jun 2024 11:36:37 -0700
Subject: [PATCH 33/36] Revert "HACK: Gain function that clones it target"

This reverts commit ceb7ccbcdbb9bda76570868250c42b6293a42fdf.
---
 mart/attack/gain.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/mart/attack/gain.py b/mart/attack/gain.py
index 9169fa8a..93317dd7 100644
--- a/mart/attack/gain.py
+++ b/mart/attack/gain.py
@@ -84,10 +84,3 @@ def forward(self, rpn_objectness: torch.Tensor) -> torch.Tensor:
         else:
             # Encourage foreground.
             return probs
-
-
-from torch.nn import BCELoss as BCELoss_
-class BCELoss(BCELoss_):
-    def forward(self, input, target):
-        # We clone target because that tensor can be made in inference mode.
-        return super().forward(input, target.clone())
From 75005395577c3424921da0ca53d5c9f32560f192 Mon Sep 17 00:00:00 2001
From: Weilin Xu
Date: Wed, 5 Jun 2024 11:36:43 -0700
Subject: [PATCH 34/36] Revert "Update README with WinClip example"

This reverts commit 310186e98a301cf47fcc9c7b5f127ca2be58adcc.
---
 examples/anomalib_adversary/README.md | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md
index 17952953..3bcb6e28 100644
--- a/examples/anomalib_adversary/README.md
+++ b/examples/anomalib_adversary/README.md
@@ -52,10 +52,6 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \
 └───────────────────────────┴───────────────────────────┘
 ```
 
-```sh
-anomalib test --data anomalib.data.MVTec --data.category hazelnut --model WinClip --data.init_args.image_size 240 --data.init_args.eval_batch_size 16 "--metrics.pixel=[F1Score,AUROC]"
-```
-
 3. Generate an adversary config file from MART.
 
 ```sh
@@ -76,10 +72,6 @@ attack.callbacks.progress_bar.enable=true \
 > anomalib_fgsm_linf_10.yaml
 ```
 
-```sh
-python -m mart.generate_config --config_dir="configs" --export_node=callbacks.adversary_connector --resolve=True callbacks=adversary_connector batch_c15n@callbacks.adversary_connector.batch_c15n=dict_imagenet_normalized callbacks.adversary_connector.adversary=$\{attack\} +attack=classification_fgsm_linf attack.objective=null attack.eps=10 attack.callbacks.progress_bar.enable=true +callbacks.adversary_connector.module_step_fn=validation_step attack.gain._call_with_args_.0=anomaly_maps attack.gain._call_with_args_.1=mask > anomalib_fgsm_linf_10.yaml
-```
-
 4. Run attack. We add a MART callback that loads the attack config file we just generated [./anomalib_fgsm_linf_10.yaml](./anomalib_fgsm_linf_10.yaml).
 
 ```sh
@@ -102,7 +94,3 @@ CUDA_VISIBLE_DEVICES=0 anomalib test \
 │ pixel_F1Score │ 0.0955422893166542 │
 └───────────────────────────┴───────────────────────────┘
 ```
-
-```sh
-anomalib test --data anomalib.data.MVTec --data.category hazelnut --model WinClip --data.init_args.image_size 240 --data.init_args.eval_batch_size 16 "--metrics.pixel=[F1Score,AUROC]" --trainer.callbacks mart.utils.CallbackInstantiator --trainer.callbacks.cfg_path ./anomalib_fgsm_linf_10.yaml
-```
From 6cc6df6ab28519ff9f9f515748d34ca523b3e943 Mon Sep 17 00:00:00 2001
From: Weilin Xu
Date: Wed, 5 Jun 2024 11:39:08 -0700
Subject: [PATCH 35/36] Update README.

---
 examples/anomalib_adversary/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/anomalib_adversary/README.md b/examples/anomalib_adversary/README.md
index 3bcb6e28..1432b107 100644
--- a/examples/anomalib_adversary/README.md
+++ b/examples/anomalib_adversary/README.md
@@ -7,7 +7,7 @@ This project demonstrates how to generate adversarial examples against anomaly d
 Anomalib requires Python 3.10+.
 
 ```sh
-pip install -r requirements.txt
+pip install -e .
 ```
 
 ## Experiment
From 26214600c403981ee9b1af53ef1994d20b179b7c Mon Sep 17 00:00:00 2001
From: Weilin Xu
Date: Wed, 5 Jun 2024 13:57:49 -0700
Subject: [PATCH 36/36] Add a comma to make lines consistent.

---
 examples/anomalib_adversary/pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/anomalib_adversary/pyproject.toml b/examples/anomalib_adversary/pyproject.toml
index 8caea066..0444dabe 100644
--- a/examples/anomalib_adversary/pyproject.toml
+++ b/examples/anomalib_adversary/pyproject.toml
@@ -8,7 +8,7 @@ authors = [
 
 dependencies = [
   "anomalib[full]==1.0.1",
-  "mart @ https://github.com/IntelLabs/MART/archive/refs/tags/v0.6.1.zip"
+  "mart @ https://github.com/IntelLabs/MART/archive/refs/tags/v0.6.1.zip",
 ]
 
 [tool.setuptools.packages.find]
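
After PATCH 31 and PATCH 35, the example is installed as an editable package from pyproject.toml instead of from requirements.txt. A minimal sketch of the resulting setup, following the patched README; the final verification line is an assumption added here for illustration and is not part of the patches:

```sh
# Editable install; pyproject.toml pins anomalib[full]==1.0.1 and MART v0.6.1.
cd examples/anomalib_adversary
pip install -e .
# Assumed sanity check (not in the patches): confirm both dependencies import cleanly.
python -c "import anomalib, mart"
```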