Skip to content
Open
37 changes: 33 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ pip install hyperactive
```bash
pip install hyperactive[sklearn-integration] # scikit-learn integration
pip install hyperactive[sktime-integration] # sktime/skpro integration
pip install hyperactive[lipo-integration] # lipo global optimizer
pip install hyperactive[all_extras] # Everything including Optuna
```

Expand Down Expand Up @@ -111,7 +112,7 @@ pip install hyperactive[all_extras] # Everything including Optuna
</td>
<td width="33%">
<a href="https://hyperactive.readthedocs.io/en/latest/user_guide/optimizers/optuna.html"><b>Multiple Backends</b></a><br>
<sub>GFO algorithms, Optuna samplers, and sklearn search methods through one unified API.</sub>
<sub>GFO algorithms, Optuna samplers, sklearn search methods, and lipo's parameter-free global optimizer through one unified API.</sub>
</td>
<td width="33%">
<a href="https://hyperactive.readthedocs.io/en/latest/api_reference.html"><b>Stable & Tested</b></a><br>
Expand Down Expand Up @@ -177,13 +178,13 @@ flowchart TB
GFO["GFO<br/>21 algorithms"]
OPTUNA["Optuna<br/>8 algorithms"]
SKL["sklearn<br/>2 algorithms"]
MORE["...<br/>more to come"]
LIPO["LIPO<br/>1 algorithm"]
end

OPT --> GFO
OPT --> OPTUNA
OPT --> SKL
OPT --> MORE
OPT --> LIPO
end

subgraph OUT["Output"]
Expand Down Expand Up @@ -366,6 +367,34 @@ best_params = optimizer.solve()



<details>
<summary><b>LIPO Global Optimizer</b></summary>

```python
import numpy as np
from hyperactive.opt.lipo import LIPOOptimizer

def objective(params):
x, y = params["x"], params["y"]
return -(x**2 + y**2)

search_space = {
"x": np.arange(-5, 5, 0.1),
"y": np.arange(-5, 5, 0.1),
}

optimizer = LIPOOptimizer(
search_space=search_space,
n_iter=100,
experiment=objective,
)
best_params = optimizer.solve()
```

</details>



<details>
<summary><b>Time Series Forecasting with sktime</b></summary>

Expand Down Expand Up @@ -508,4 +537,4 @@ If you use this software in your research, please cite:

## License

[MIT License](./LICENSE) - Free for commercial and academic use.
[MIT License](./LICENSE) - Free for commercial and academic use.
18 changes: 18 additions & 0 deletions examples/lipo/lipo_examples.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""Minimal LIPO example: maximize the negative sphere function.

The objective -(x**2 + y**2) attains its maximum (0.0) at (x, y) = (0, 0),
so the optimizer is expected to return parameters near the origin.
"""

import numpy as np
from hyperactive.opt.lipo import LIPOOptimizer


def objective(params):
    """Negative sphere function; maximal (0.0) at x = y = 0."""
    x, y = params["x"], params["y"]
    return -(x ** 2 + y ** 2)  # max at (0, 0)


def main():
    """Build the optimizer, run it, and print the best parameters found."""
    opt = LIPOOptimizer(
        search_space={
            "x": np.arange(-5, 5, 0.1),
            "y": np.arange(-5, 5, 0.1),
        },
        n_iter=100,
        experiment=objective,
    )
    print(opt.solve())  # expected near {'x': 0.0, 'y': 0.0}


# Guard so importing this example (e.g. by docs tooling) does not
# trigger a 100-iteration optimization run as a side effect.
if __name__ == "__main__":
    main()
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ all_extras = [
"optuna<5",
"cmaes", # Required for CmaEsOptimizer (optuna's CMA-ES sampler)
"lightning",
"lipo",
]


Expand Down
1 change: 1 addition & 0 deletions src/hyperactive/opt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from hyperactive.opt.gridsearch import GridSearchSk
from hyperactive.opt.random_search import RandomSearchSk
from .lipo import LIPOOptimizer

from .gfo import (
BayesianOptimizer,
Expand Down
54 changes: 54 additions & 0 deletions src/hyperactive/opt/lipo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import numpy as np
from lipo import GlobalOptimizer

class LIPOOptimizer:
    """Parameter-free global optimization via the ``lipo`` package.

    Wraps ``lipo.GlobalOptimizer`` behind a grid-based search-space API:
    numeric dimensions are given as array-likes of valid values (lipo's
    continuous suggestions are snapped back to the nearest grid point),
    and categorical dimensions as lists of strings.

    Parameters
    ----------
    search_space : dict
        Maps each parameter name to either an array-like of numeric grid
        values or a list of strings (categorical dimension).
    n_iter : int
        Number of optimization iterations to run.
    experiment : callable
        Objective function; receives a dict of parameter values and
        returns a numeric score.
    maximize : bool, default=True
        If True the objective is maximized, otherwise minimized.
    """

    def __init__(self, search_space, n_iter, experiment, maximize=True):
        self.search_space = search_space
        self.n_iter = n_iter
        self.experiment = experiment
        self.maximize = maximize
        # name -> numeric grid array; filled by _parse_search_space and
        # used by _snap_to_grid to map continuous suggestions back onto
        # the discrete search space.
        self._grids = {}

    def _parse_search_space(self):
        """Split the search space into lipo bounds and categorical choices.

        Returns
        -------
        tuple of dict
            ``(lower, upper, cats)``: per-dimension lower bounds, upper
            bounds, and categorical value lists, in the form expected by
            ``lipo.GlobalOptimizer``.

        Raises
        ------
        ValueError
            If any search-space dimension contains no values.
        """
        lower, upper, cats = {}, {}, {}
        for key, values in self.search_space.items():
            if len(values) == 0:
                raise ValueError(f"search_space entry {key!r} is empty")
            # Categorical: a list whose entries are all strings.  Checking
            # every element (not just values[0]) avoids misclassifying
            # mixed lists and crashing later on float() conversion.
            if isinstance(values, list) and all(
                isinstance(v, str) for v in values
            ):
                cats[key] = values
            else:
                arr = np.asarray(values)
                lower[key] = float(arr.min())
                upper[key] = float(arr.max())
                # Store grid so we can snap results back later
                self._grids[key] = arr
        return lower, upper, cats

    def _snap_to_grid(self, params):
        """Snap lipo's continuous output to the nearest valid grid point.

        Categorical values (no stored grid for the key) pass through
        unchanged.
        """
        snapped = {}
        for key, val in params.items():
            grid = self._grids.get(key)
            if grid is not None:
                snapped[key] = grid[np.argmin(np.abs(grid - val))]
            else:
                snapped[key] = val  # categorical, pass through
        return snapped

    def solve(self):
        """Run the optimization and return the best grid-snapped params."""
        lower, upper, cats = self._parse_search_space()

        def wrapped(**kwargs):
            # Evaluate the objective on snapped parameters so the score
            # lipo records corresponds to an actually valid grid point.
            return self.experiment(self._snap_to_grid(kwargs))

        opt = GlobalOptimizer(
            wrapped,
            lower_bounds=lower,
            upper_bounds=upper,
            categories=cats,
            maximize=self.maximize,
        )
        opt.run(self.n_iter)

        # NOTE(review): assumes lipo's ``optimum`` is (params, value) --
        # matches original usage; confirm against the lipo API docs.
        return self._snap_to_grid(opt.optimum[0])
41 changes: 41 additions & 0 deletions src/hyperactive/tests/test_lipo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import numpy as np
import pytest
from hyperactive.opt.lipo import LIPOOptimizer


def sphere(params):
    """Negative sphere objective: maximal (0) at the origin."""
    x = params["x"]
    y = params["y"]
    return -(x * x + y * y)


def test_lipo_basic():
    """Optimizing the sphere on a 2-D grid should land near the origin."""
    search_space = {
        "x": np.arange(-5, 5, 0.1),
        "y": np.arange(-5, 5, 0.1),
    }
    optimizer = LIPOOptimizer(
        search_space=search_space,
        n_iter=50,
        experiment=sphere,
    )
    result = optimizer.solve()
    assert "x" in result
    assert "y" in result
    assert abs(result["x"]) < 1.5  # should be near 0


def test_lipo_categorical():
    """A purely categorical space should recover the best category."""

    def score(p):
        return 1.0 if p["kernel"] == "rbf" else 0.0

    optimizer = LIPOOptimizer(
        search_space={"kernel": ["linear", "rbf", "poly"]},
        n_iter=20,
        experiment=score,
    )
    assert optimizer.solve()["kernel"] == "rbf"


def test_lipo_snap_to_grid():
    """Returned values must lie on the discrete grid, never between points."""

    def distance_to_three(p):
        return -abs(p["x"] - 3)

    optimizer = LIPOOptimizer(
        search_space={"x": np.array([1, 2, 3, 4, 5])},
        n_iter=30,
        experiment=distance_to_three,
    )
    assert optimizer.solve()["x"] in [1, 2, 3, 4, 5]  # must be on the grid