Skip to content

Commit

Permalink
MNT Upgrade ruff version to ~0.4.8 (#1851)
Browse files Browse the repository at this point in the history
We currently use ruff v0.2.2, which is quite far behind the latest
version. This has the disadvantage that new contributors will often
install the latest version of ruff and then get CI errors, even though
they ran `make style`.

Here is the full list of changes:

- bump ruff version to ~0.4.8
- update the ruff commands in Makefile (ruff foo/ -> ruff check foo/)
- update coding style of two files that changed with the new ruff
  version
  • Loading branch information
BenjaminBossan committed Jun 12, 2024
1 parent b6af7fe commit 8843a76
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 19 deletions.
4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ check_dirs := src tests examples docs scripts docker

# this target runs checks on all files
quality:
ruff $(check_dirs)
ruff check $(check_dirs)
ruff format --check $(check_dirs)
doc-builder style src/peft tests docs/source --max_len 119 --check_only

# Format source code automatically and check if there are any problems left that need manual fixing
style:
ruff $(check_dirs) --fix
ruff check --fix $(check_dirs)
ruff format $(check_dirs)
doc-builder style src/peft tests docs/source --max_len 119

Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
extras["quality"] = [
"black", # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
"hf-doc-builder",
"ruff~=0.2.1",
"ruff~=0.4.8",
]
extras["docs_specific"] = [
"black", # doc-builder has an implicit dependency on Black, see huggingface/doc-builder#434
Expand Down
18 changes: 6 additions & 12 deletions src/peft/tuners/lycoris_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,7 @@ def __init__(self, base_layer: nn.Module) -> None:

@property
@abstractmethod
def _available_adapters(self) -> set[str]:
...
def _available_adapters(self) -> set[str]: ...

def _init_empty_weights(self, cls, *args, **kwargs) -> None:
# A helper method that allows to initialize the layer of the given class without spending time to initialize the
Expand All @@ -95,17 +94,15 @@ def _init_empty_weights(self, cls, *args, **kwargs) -> None:
self.to_empty(device=final_device)

@abstractmethod
def create_adapter_parameters(self, adapter_name: str, r: int, **kwargs):
...
def create_adapter_parameters(self, adapter_name: str, r: int, **kwargs): ...

# TODO: refactor LoRA to use the same approach
@abstractmethod
def _get_delta_activations(self, adapter_name: str, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
"""Activations added on top of the base layer output (i.e. after the base layer forward pass)"""

@abstractmethod
def get_delta_weight(self, adapter_name: str) -> torch.Tensor:
...
def get_delta_weight(self, adapter_name: str) -> torch.Tensor: ...

def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
"""
Expand Down Expand Up @@ -143,8 +140,7 @@ def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = N
self.merged_adapters.append(active_adapter)

@abstractmethod
def reset_adapter_parameters(self, adapter_name: str):
...
def reset_adapter_parameters(self, adapter_name: str): ...

def set_scale(self, adapter, scale):
if adapter not in self._available_adapters:
Expand Down Expand Up @@ -185,8 +181,7 @@ def unscale_layer(self, scale=None) -> None:
self.scaling[active_adapter] /= scale

@abstractmethod
def update_layer(self, adapter_name: str, r: int, alpha: float, **kwargs):
...
def update_layer(self, adapter_name: str, r: int, alpha: float, **kwargs): ...


class LycorisTuner(BaseTuner):
Expand Down Expand Up @@ -220,8 +215,7 @@ def _create_and_replace(
target_name,
parent,
current_key,
):
...
): ...

@classmethod
def _create_new_module(cls, config: LycorisConfig, adapter_name: str, target: nn.Module, **kwargs) -> LycorisLayer:
Expand Down
6 changes: 2 additions & 4 deletions src/peft/tuners/poly/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,10 @@ def get_router(poly_config: PolyConfig) -> nn.Module:

class Router(nn.Module, ABC):
@abstractmethod
def reset(self):
...
def reset(self): ...

@abstractmethod
def forward(self, task_ids: torch.Tensor, input_ids: torch.Tensor):
...
def forward(self, task_ids: torch.Tensor, input_ids: torch.Tensor): ...


class PolyRouter(Router):
Expand Down

0 comments on commit 8843a76

Please sign in to comment.