add optlib="cmaes" and plot_parameters_parallel
jcmgray committed Aug 9, 2024
1 parent e19e7db commit 984c8da
Showing 19 changed files with 2,683 additions and 1,246 deletions.
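
This commit adds a "cmaes" hyper-optimization backend (backed by the `cmaes` package) and a `plot_parameters_parallel` plotting method on `HyperOptimizer`. A minimal usage sketch, with made-up example inputs, assuming a standard cotengra install with `cmaes` available and that the new plot method can be called with default arguments:

```python
import cotengra as ctg

# toy contraction, purely illustrative inputs
inputs = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e")]
output = ("a", "e")
size_dict = {ix: 4 for ix in "abcde"}

# select the new CMA-ES based sampler via optlib="cmaes"
opt = ctg.HyperOptimizer(optlib="cmaes", max_repeats=32)
tree = opt.search(inputs, output, size_dict)

# new method: plot the sampled hyper-parameters across trials
opt.plot_parameters_parallel()
```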
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.8', '3.9', '3.10', '3.11']
python-version: ['3.9', '3.10', '3.11', '3.12']
env: [base]
include:
- os: macos-latest
1 change: 1 addition & 0 deletions ci/requirements/py-base.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
dependencies:
- autoray
- cmaes
- coverage
- matplotlib
- networkx
1 change: 1 addition & 0 deletions ci/requirements/py-mac.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
dependencies:
- autoray
- cmaes
- coverage
- matplotlib
- networkx
1 change: 1 addition & 0 deletions ci/requirements/py-no-oe.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
dependencies:
- autoray
- cmaes
- coverage
- matplotlib
- networkx
1 change: 1 addition & 0 deletions ci/requirements/py-win.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
dependencies:
- autoray
- cmaes
- coverage
- matplotlib
- networkx
2 changes: 2 additions & 0 deletions cotengra/__init__.py
@@ -34,6 +34,7 @@
from .hyperoptimizers import (
hyper_baytune,
hyper_choco,
hyper_cmaes,
hyper_nevergrad,
hyper_optuna,
hyper_random,
@@ -151,6 +152,7 @@
"hash_contraction",
"hyper_baytune",
"hyper_choco",
"hyper_cmaes",
"hyper_nevergrad",
"hyper_optimize",
"hyper_optuna",
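
As with the existing backends, the new driver module is imported at package load (which, mirroring the other hyper_* modules, presumably registers the backend) and added to `__all__`. A quick sanity check, assuming a standard install:

```python
import cotengra as ctg

# the cmaes driver module is now part of the public namespace
from cotengra import hyper_cmaes  # noqa: F401
assert "hyper_cmaes" in ctg.__all__
```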
12 changes: 6 additions & 6 deletions cotengra/core.py
@@ -2954,29 +2954,29 @@ def print_contractions(self, sort=None, show_brackets=True):
type_msg = "einsum"

pa = "".join(
PINK + f"({ix})"
PINK + f"{{{ix}}}"
if (ix in l_legs) and (ix in r_legs)
else GREEN + f"({ix})"
if ix in r_legs
else BLUE + ix
for ix in p_inds
).replace(f"){GREEN}(", "")
).replace(f"){GREEN}(", "").replace(f"}}{PINK}{{", "")
la = "".join(
PINK + f"[{ix}]"
PINK + f"{{{ix}}}"
if (ix in p_legs) and (ix in r_legs)
else RED + f"[{ix}]"
if ix in r_legs
else BLUE + ix
for ix in l_inds
).replace(f"]{RED}[", "")
).replace(f"]{RED}[", "").replace(f"}}{PINK}{{", "")
ra = "".join(
PINK + f"[{ix}]"
PINK + f"{{{ix}}}"
if (ix in p_legs) and (ix in l_legs)
else RED + f"[{ix}]"
if ix in l_legs
else GREEN + ix
for ix in r_inds
).replace(f"]{RED}[", "")
).replace(f"]{RED}[", "").replace(f"}}{PINK}{{", "")

entries.append(
(
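
In `print_contractions`, indices present in both of the other two tensors were previously wrapped in the local parentheses/brackets; they are now wrapped in braces, with adjacent brace groups merged by the extra `.replace` calls. A small sketch to view the annotated output, using made-up inputs and assuming the top-level `array_contract_tree` helper:

```python
import cotengra as ctg

# hypothetical tiny contraction, used only to display the annotations
inputs = [("a", "b", "x"), ("b", "c", "x"), ("c", "d")]
output = ("a", "d")
size_dict = {ix: 2 for ix in "abcdx"}

tree = ctg.array_contract_tree(inputs, output, size_dict)
tree.print_contractions()
```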
45 changes: 34 additions & 11 deletions cotengra/hyperoptimizers/hyper.py
@@ -18,7 +18,13 @@
from ..core_multi import ContractionTreeMulti
from ..oe import PathOptimizer
from ..parallel import get_n_workers, parse_parallel_arg, should_nest, submit
from ..plot import plot_scatter, plot_scatter_alt, plot_trials, plot_trials_alt
from ..plot import (
plot_parameters_parallel,
plot_scatter,
plot_scatter_alt,
plot_trials,
plot_trials_alt,
)
from ..scoring import get_score_fn
from ..utils import BadTrial, DiskDict, get_rng

@@ -37,23 +43,39 @@ def get_default_hq_methods():
return tuple(methods)


@functools.lru_cache(maxsize=None)
def get_default_optlib_eco():
"""Get the default optimizer favoring speed."""
if importlib.util.find_spec("cmaes"):
optlib = "cmaes"
elif importlib.util.find_spec("nevergrad"):
optlib = "nevergrad"
elif importlib.util.find_spec("optuna"):
optlib = "optuna"
else:
optlib = "random"
warnings.warn(
"Couldn't find `optuna`, `cmaes`, `baytune (btb)`, `chocolate`, "
"or `nevergrad` so will use completely random "
"sampling in place of hyper-optimization."
)
return optlib


@functools.lru_cache(maxsize=None)
def get_default_optlib():
"""Get the default optimizer balancing quality and speed."""
if importlib.util.find_spec("optuna"):
optlib = "optuna"
elif importlib.util.find_spec("btb"):
optlib = "baytune"
elif importlib.util.find_spec("chocolate"):
optlib = "chocolate"
elif importlib.util.find_spec("cmaes"):
optlib = "cmaes"
elif importlib.util.find_spec("nevergrad"):
optlib = "nevergrad"
elif importlib.util.find_spec("skopt"):
optlib = "skopt"
else:
optlib = "random"
warnings.warn(
"Couldn't find `optuna`, `baytune (btb)`, `chocolate`, "
"`nevergrad` or `skopt` so will use completely random "
"Couldn't find `optuna`, `cmaes`, `baytune (btb)`, `chocolate`, "
"or `nevergrad` so will use completely random "
"sampling in place of hyper-optimization."
)
return optlib
@@ -249,7 +271,7 @@ def __call__(self, *args, **kwargs):


class CompressedReconfTrial:
def __init__(self, trial_fn, minimize, **opts):
def __init__(self, trial_fn, minimize=None, **opts):
self.trial_fn = trial_fn
self.minimize = minimize
self.opts = opts
@@ -367,7 +389,7 @@ class HyperOptimizer(PathOptimizer):
If supplied, once a trial contraction path is found, try subtree
reconfiguation with the given options, and then update the flops and
size of the trial with the reconfigured versions.
optlib : {'baytune', 'nevergrad', 'chocolate', 'skopt'}, optional
optlib : {'optuna', 'cmaes', 'nevergrad', 'skopt', ...}, optional
Which optimizer to sample and train with.
space : dict, optional
The hyper space to search, see ``get_hyper_space`` for the default.
@@ -838,6 +860,7 @@ def to_dfs_parametrized(self):
plot_trials_alt = plot_trials_alt
plot_scatter = plot_scatter
plot_scatter_alt = plot_scatter_alt
plot_parameters_parallel = plot_parameters_parallel


def sortedtuple(x):
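
There are now two selection helpers: `get_default_optlib_eco`, which favors speed and prefers `cmaes`, and `get_default_optlib`, which balances quality and speed and has `cmaes` inserted into its fallback chain. A quick way to see which backend would be picked in a given environment, assuming these helpers remain importable from this module (both are cached with `lru_cache`, so repeated calls are cheap):

```python
from cotengra.hyperoptimizers.hyper import (
    get_default_optlib,
    get_default_optlib_eco,
)

# which backend a HyperOptimizer would use by default in this environment
print(get_default_optlib())      # e.g. "optuna" if installed, ... else "random"
print(get_default_optlib_eco())  # "cmaes" if installed, else "nevergrad", ...
```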
(diffs for the remaining 11 changed files not shown)
