setting up the ci-cd correctly
Jorgedavyd committed Jun 2, 2024
1 parent b09f71c commit 833de9a
Showing 15 changed files with 731 additions and 335 deletions.
17 changes: 5 additions & 12 deletions .github/workflows/CI.yml
@@ -15,13 +15,15 @@ jobs:
python-version: [3.8, 3.9, 3.11, 3.12]
include:
- python-version: 3.12
commit: true
commit: false
- python-version: 3.8
commit: false
- python-version: 3.9
commit: false
- python-version: 3.11
commit: false
- python-version: 3.10
commit: true

steps:
- name: Checkout repository
@@ -32,29 +32,20 @@
with:
python-version: ${{ matrix.python-version }}

- name: Set up dependencies
run: |
python -m pip install --upgrade pip
pip install pipreqs pip-tools
chmod +x requirements.sh
sh ./requirements.sh
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pytest black
pip install -r requirements.txt
- name: Run tests
run: |
pytest tests/
- name: Run black
run: |
black .
- name: Commit to repo
if: matrix.commit == true
run: |
black .
git config --global user.name 'Jorgedavyd'
git config --global user.email 'jorged.encyso@gmail.com'
git diff --exit-code || (git add . && git commit -m "Automatically formatted with black" && git push)
4 changes: 2 additions & 2 deletions docs/api/htuning.md
@@ -16,12 +16,12 @@ if __name__ == '__main__':
model_class = FourierVAE,
hparam_objective = objective,
datamodule = NormalModule,
valid_metrics = [f"Training/{name}" for name in [
valid_metrics = [
"Pixel",
"Perceptual",
"Style",
"Total variance",
"KL Divergence"]],
"KL Divergence"],
directions = ['minimize', 'minimize', 'minimize', 'minimize', 'minimize'],
precision = 'medium',
n_trials = 150,
1 change: 0 additions & 1 deletion docs/api/transformer.md

This file was deleted.

21 changes: 14 additions & 7 deletions lightorch/htuning/optuna.py
@@ -43,13 +43,20 @@ def objective(trial: optuna.trial.Trial):
trainer.fit(model, datamodule=dataset)

if isinstance(valid_metrics, str):
return trainer.callback_metrics[valid_metrics].item()

return (
trainer.callback_metrics[valid_metric].item()
for valid_metric in valid_metrics
)

if valid_metrics == 'hp_metric':
return trainer.callback_metrics[valid_metrics].item()
return trainer.callback_metrics[f'Training/{valid_metrics}'].item()

else:
out = []
for valid_metric in valid_metrics:
if valid_metric == 'hp_metric':
out.append(trainer.callback_metrics[valid_metric].item())
else:
out.append(trainer.callback_metrics[f'Training/{valid_metric}'].item())

return out

if "precision" in kwargs:
torch.set_float32_matmul_precision(precision)
else:
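In short, the updated objective maps bare metric names onto the Training/ namespace, while the special hp_metric key is looked up verbatim. A minimal sketch of that lookup rule, assuming only that trainer.callback_metrics behaves like a mapping of scalar tensors (this is an illustration, not the library's API):

    import torch

    def resolve_metrics(callback_metrics, valid_metrics):
        # Mirror the updated objective: 'hp_metric' is used as-is, any other
        # name is namespaced under 'Training/', and tensors become floats.
        def lookup(name: str) -> float:
            key = name if name == "hp_metric" else f"Training/{name}"
            return callback_metrics[key].item()

        if isinstance(valid_metrics, str):
            return lookup(valid_metrics)
        return [lookup(name) for name in valid_metrics]

    # Toy stand-in for trainer.callback_metrics:
    metrics = {
        "Training/Pixel": torch.tensor(0.12),
        "Training/KL Divergence": torch.tensor(0.03),
        "hp_metric": torch.tensor(0.15),
    }
    print(resolve_metrics(metrics, ["Pixel", "KL Divergence"]))  # two namespaced metrics as floats
    print(resolve_metrics(metrics, "hp_metric"))                 # the hp_metric value, unprefixed

This is also why the documentation example above now passes valid_metrics without the Training/ prefix.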
10 changes: 4 additions & 6 deletions lightorch/nn/criterions.py
@@ -27,23 +27,21 @@ def __init__(

class Loss(LighTorchLoss):
def __init__(self, *loss) -> None:
assert (len(set(map(type, loss))) == len(loss)), 'Not valid input classes, each should be different.'
super().__init__(
list(set([*chain.from_iterable([i.labels for i in loss])])),
_merge_dicts([i.factors for i in loss]),
)
assert len(loss) == len(
self.factors
), "Must have the same length of losses as factors"
self.loss = loss

def forward(self, **kwargs) -> Tuple[Tensor, ...]:
loss_ = 0
out_list = []

for loss in self.loss:
*loss_arg, out_ = loss(**kwargs)
out_list.extend(list(*loss_arg))
loss_ += out_
args = loss(**kwargs)
out_list.extend(list(args[:-1]))
loss_ += args[-1]

out_list.append(loss_)

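The reworked forward assumes each sub-loss returns a tuple whose last element is its overall term, with any intermediate terms before it; the composite Loss keeps the intermediate terms and sums the overall terms. A toy sketch of that composition pattern, using stand-in callables rather than the library's loss classes:

    from typing import Tuple

    class PixelTerm:
        # Stand-in sub-loss: returns (intermediate term, weighted overall term).
        def __call__(self, **kwargs) -> Tuple[float, float]:
            value = abs(kwargs["pred"] - kwargs["target"])
            return value, 0.5 * value

    class StyleTerm:
        def __call__(self, **kwargs) -> Tuple[float, float]:
            value = (kwargs["pred"] - kwargs["target"]) ** 2
            return value, 2.0 * value

    def combined(*losses, **kwargs) -> Tuple[float, ...]:
        out_list, total = [], 0.0
        for loss in losses:
            args = loss(**kwargs)
            out_list.extend(args[:-1])  # keep every intermediate term
            total += args[-1]           # accumulate the overall terms
        out_list.append(total)
        return tuple(out_list)

    print(combined(PixelTerm(), StyleTerm(), pred=1.5, target=1.0))  # (0.5, 0.25, 0.75)

The new assertion above also means each sub-loss passed to Loss must be of a distinct class.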
89 changes: 4 additions & 85 deletions lightorch/training/adversarial.py
@@ -1,47 +1,14 @@
from typing import Union, Sequence, Any, Tuple, Dict
from torch import Tensor, nn
from typing import Any, Dict
from torch.optim import Optimizer
from torch.optim.lr_scheduler import LRScheduler
from collections import defaultdict
import torch
from torch.optim import Adam, Adadelta, Adamax, AdamW, SGD, LBFGS, RMSprop
from .supervised import Module as Module_
from torch.optim.lr_scheduler import (
OneCycleLR,
ReduceLROnPlateau,
ExponentialLR,
LinearLR,
)
from torch import Tensor
import torchvision

VALID_OPTIMIZERS = {
"adam": Adam,
"adadelta": Adadelta,
"adamax": Adamax,
"adamw": AdamW,
"sgd": SGD,
"lbfgs": LBFGS,
"rms": RMSprop,
}

VALID_SCHEDULERS = {
"onecycle": OneCycleLR,
"plateau": ReduceLROnPlateau,
"exponential": ExponentialLR,
"linear": LinearLR,
}


def interval(algo: LRScheduler) -> str:
if isinstance(algo, OneCycleLR):
return "step"
else:
return "epoch"


class Module(Module_):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
def __init__(self, *, optimizer: str | Optimizer, scheduler: str | LRScheduler = None, triggers: Dict[str, Dict[str, float]] = None, optimizer_kwargs: Dict[str, Any] = None, scheduler_kwargs: Dict[str, Any] = None, gradient_clip_algorithm: str = None, gradient_clip_val: float = None) -> None:
super().__init__(optimizer=optimizer, scheduler=scheduler, triggers=triggers, optimizer_kwargs=optimizer_kwargs, scheduler_kwargs=scheduler_kwargs, gradient_clip_algorithm=gradient_clip_algorithm, gradient_clip_val=gradient_clip_val)
self.automatic_optimization = False

def validation_step(self) -> None:
@@ -90,53 +57,5 @@ def training_step(self, batch: Tensor, idx: int) -> Tensor:
opt_d.zero_grad()
self.untoggle_optimizer(opt_d)

def get_param_groups(self, *triggers) -> Tuple:
"""
Given a list of "triggers", the param groups are defined.
"""

param_groups: Sequence[Dict[str, Sequence[nn.Module]]] = [
defaultdict(list) * len(triggers)
]

for param_group, trigger in zip(param_groups, triggers):
for name, param in self.named_modules():
if name.startswith(trigger):
param_group["params"].append(param)

return param_groups

def _configure_optimizer(self) -> Optimizer:
optimizer_args: Dict[str, Union[float, nn.Module]] = []
for hparam, param_group in zip(
self.get_hparams(), self.get_param_groups(*self.triggers)
):
optimizer_args.append(param_group.update(hparam))
optimizer = VALID_OPTIMIZERS[self.optimizer](optimizer_args)
return optimizer

def _configure_scheduler(self, optimizer: Optimizer) -> LRScheduler:
if self.scheduler == "onecycle":
return VALID_SCHEDULERS[self.scheduler](
optimizer,
**self.scheduler_kwargs.update(
{"total_steps": self.trainer.estimated_stepping_batches}
)
)
else:
return VALID_SCHEDULERS[self.scheduler](optimizer, **self.scheduler_kwargs)

def configure_optimizers(self) -> Optimizer | Sequence[Optimizer]:
optimizer = self._configure_optimizer()
scheduler = self._configure_scheduler(optimizer)
return {
"optimizer": optimizer,
"lr_scheduler": {
"scheduler": scheduler,
"interval": interval(optimizer),
"frequency": 1,
},
}


__all__ = ["Module"]
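After this refactor the adversarial Module no longer builds optimizers and schedulers itself; the keyword-only constructor forwards the training configuration to the supervised Module, and manual optimization stays enabled. A rough sketch of a subclass's super().__init__ call under the new signature; the keyword names come from the diff above, but the values, the string-to-class resolution ('adam', 'onecycle') and the reading of triggers as module-name prefixes are assumptions carried over from the code removed here:

    from lightorch.training.adversarial import Module

    class AdversarialExample(Module):
        # Hypothetical subclass for illustration only; the networks and losses a
        # real subclass needs are omitted.
        def __init__(self) -> None:
            super().__init__(
                optimizer="adam",                 # assumed to be resolved by the supervised Module
                scheduler="onecycle",
                triggers={"generator": {"lr": 1e-4}, "discriminator": {"lr": 4e-4}},
                optimizer_kwargs=None,
                scheduler_kwargs={"max_lr": 1e-3},
                gradient_clip_algorithm="norm",   # assumed Lightning-style value
                gradient_clip_val=1.0,
            )
            # automatic_optimization is already set to False by the parent, whose
            # training_step steps the generator and discriminator optimizers manually.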
4 changes: 2 additions & 2 deletions lightorch/training/cli.py
@@ -1,6 +1,5 @@
from lightning.pytorch import LightningDataModule
import torch
from lightning.pytorch.cli import LightningCLI
import torch


def trainer(
@@ -15,6 +14,7 @@ def trainer(
trainer_defaults={
"deterministic": deterministic,
},

)


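For context, the wrapper builds a LightningCLI with deterministic passed through trainer_defaults. A self-contained sketch of that pattern in plain Lightning; the toy model and datamodule are made up for illustration, and the repository's trainer signature (cut off above) is not reproduced:

    import torch
    from torch import nn
    from torch.optim import Adam
    from torch.utils.data import DataLoader, TensorDataset
    from lightning.pytorch import LightningModule, LightningDataModule
    from lightning.pytorch.cli import LightningCLI

    class ToyModel(LightningModule):
        def __init__(self, lr: float = 1e-3) -> None:
            super().__init__()
            self.net = nn.Linear(4, 1)
            self.lr = lr

        def training_step(self, batch, idx):
            x, y = batch
            return nn.functional.mse_loss(self.net(x), y)

        def configure_optimizers(self):
            return Adam(self.parameters(), lr=self.lr)

    class ToyData(LightningDataModule):
        def train_dataloader(self):
            x = torch.randn(64, 4)
            return DataLoader(TensorDataset(x, x.sum(1, keepdim=True)), batch_size=8)

    if __name__ == "__main__":
        # Invoked e.g. as `python train.py fit`; deterministic execution is the
        # default here but can still be overridden from the CLI or a config file.
        LightningCLI(ToyModel, ToyData, trainer_defaults={"deterministic": True})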
[Diffs for the remaining changed files were not loaded.]
