From 62122b5add8d6892f70c82eaef2147a6ba33b90b Mon Sep 17 00:00:00 2001 From: Benjamin Bossan Date: Mon, 1 Jul 2024 15:42:10 +0200 Subject: [PATCH] FIX TEST Higher tolerance for AdaLoRA in test (#1897) The test is flaky on CI, so this PR increases the tolerance to hopefully fix the flakiness. I cannot reproduce the error locally (neither on GPU nor CPU), so I'm not 100% sure if this tolerance is enough to make the test reliable. --- tests/testing_common.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/testing_common.py b/tests/testing_common.py index 83ad81240a..6368b6c7e4 100644 --- a/tests/testing_common.py +++ b/tests/testing_common.py @@ -567,6 +567,9 @@ def _test_merge_layers(self, model_id, config_cls, config_kwargs): atol, rtol = 1e-4, 1e-4 if self.torch_device in ["mlu"]: atol, rtol = 1e-3, 1e-3 # MLU + if config.peft_type == "ADALORA": + # AdaLoRA is a bit flaky on CI, but this cannot be reproduced locally + atol, rtol = 1e-3, 1e-3 if (config.peft_type == "IA3") and (model_id == "Conv2d"): # for some reason, the IA³ Conv2d introduces a larger error atol, rtol = 0.3, 0.01