diff --git a/README.md b/README.md
index 0562174b..5e734982 100644
--- a/README.md
+++ b/README.md
@@ -80,6 +80,7 @@ pip install hyperactive
```bash
pip install hyperactive[sklearn-integration] # scikit-learn integration
pip install hyperactive[sktime-integration] # sktime/skpro integration
+pip install hyperactive[lipo-integration] # lipo global optimizer
pip install hyperactive[all_extras] # Everything including Optuna
```
@@ -111,7 +112,7 @@ pip install hyperactive[all_extras] # Everything including Optuna
Multiple Backends
- GFO algorithms, Optuna samplers, and sklearn search methods through one unified API.
+ GFO algorithms, Optuna samplers, sklearn search methods, and lipo's parameter-free global optimizer through one unified API.
|
Stable & Tested
@@ -177,13 +178,13 @@ flowchart TB
GFO["GFO 21 algorithms"]
OPTUNA["Optuna 8 algorithms"]
SKL["sklearn 2 algorithms"]
- MORE["... more to come"]
+ LIPO["LIPO 1 algorithm"]
end
OPT --> GFO
OPT --> OPTUNA
OPT --> SKL
- OPT --> MORE
+ OPT --> LIPO
end
subgraph OUT["Output"]
@@ -366,6 +367,34 @@ best_params = optimizer.solve()
+
+LIPO Global Optimizer
+
+```python
+import numpy as np
+from hyperactive.opt.lipo import LIPOOptimizer
+
+def objective(params):
+ x, y = params["x"], params["y"]
+ return -(x**2 + y**2)
+
+search_space = {
+ "x": np.arange(-5, 5, 0.1),
+ "y": np.arange(-5, 5, 0.1),
+}
+
+optimizer = LIPOOptimizer(
+ search_space=search_space,
+ n_iter=100,
+ experiment=objective,
+)
+best_params = optimizer.solve()
+```
+
+
+
+
+
Time Series Forecasting with sktime
@@ -508,4 +537,4 @@ If you use this software in your research, please cite:
## License
-[MIT License](./LICENSE) - Free for commercial and academic use.
+[MIT License](./LICENSE) - Free for commercial and academic use.
\ No newline at end of file
diff --git a/examples/lipo/lipo_examples.py b/examples/lipo/lipo_examples.py
new file mode 100644
index 00000000..98e0d296
--- /dev/null
+++ b/examples/lipo/lipo_examples.py
@@ -0,0 +1,18 @@
+import numpy as np
+from hyperactive.opt.lipo import LIPOOptimizer
+
+
+def objective(params):
+ x, y = params["x"], params["y"]
+ return -(x ** 2 + y ** 2) # max at (0, 0)
+
+
+opt = LIPOOptimizer(
+ search_space={
+ "x": np.arange(-5, 5, 0.1),
+ "y": np.arange(-5, 5, 0.1),
+ },
+ n_iter=100,
+ experiment=objective,
+)
+print(opt.solve()) # {'x': ~0.0, 'y': ~0.0}
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index f77e7362..a145f4a1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,6 +99,12 @@ all_extras = [
"optuna<5",
"cmaes", # Required for CmaEsOptimizer (optuna's CMA-ES sampler)
"lightning",
+    "lipo",
]
+
+# Defines the `lipo-integration` extra advertised in the README install section.
+lipo-integration = [
+    "lipo",
+]
diff --git a/src/hyperactive/opt/__init__.py b/src/hyperactive/opt/__init__.py
index da303a23..4daa4ebc 100644
--- a/src/hyperactive/opt/__init__.py
+++ b/src/hyperactive/opt/__init__.py
@@ -4,6 +4,11 @@

from hyperactive.opt.gridsearch import GridSearchSk
from hyperactive.opt.random_search import RandomSearchSk
+# `lipo` is an optional dependency; keep `hyperactive.opt` importable without it.
+try:
+    from .lipo import LIPOOptimizer
+except ImportError:  # extra not installed
+    LIPOOptimizer = None
from .gfo import (
BayesianOptimizer,
diff --git a/src/hyperactive/opt/lipo.py b/src/hyperactive/opt/lipo.py
new file mode 100644
index 00000000..51cb7f10
--- /dev/null
+++ b/src/hyperactive/opt/lipo.py
@@ -0,0 +1,54 @@
+import numpy as np
+from lipo import GlobalOptimizer
+
+class LIPOOptimizer:
+ """Parameter-free global optimizer via the lipo package."""
+
+ def __init__(self, search_space, n_iter, experiment, maximize=True):
+ self.search_space = search_space
+ self.n_iter = n_iter
+ self.experiment = experiment
+ self.maximize = maximize
+
+    def _parse_search_space(self):
+        lower, upper, cats = {}, {}, {}
+        # Reset grids each call so stale entries from a prior solve() don't linger.
+        self._grids = {}
+        for key, values in self.search_space.items():
+            # Categorical: non-empty list whose elements are all strings
+            if isinstance(values, list) and values and all(isinstance(v, str) for v in values):
+                cats[key] = values
+            else:
+                arr = np.array(values)
+                lower[key] = float(arr.min())
+                upper[key] = float(arr.max())
+                # Store grid so we can snap results back later
+                self._grids[key] = arr
+        return lower, upper, cats
+
+ def _snap_to_grid(self, params):
+ """Snap lipo's continuous output to nearest valid grid point."""
+ snapped = {}
+ for key, val in params.items():
+ if key in getattr(self, "_grids", {}):
+ grid = self._grids[key]
+ snapped[key] = grid[np.argmin(np.abs(grid - val))]
+ else:
+ snapped[key] = val # categorical, pass through
+ return snapped
+
+ def solve(self):
+ lower, upper, cats = self._parse_search_space()
+
+ def wrapped(**kwargs):
+ return self.experiment(self._snap_to_grid(kwargs))
+
+ opt = GlobalOptimizer(
+ wrapped,
+ lower_bounds=lower,
+ upper_bounds=upper,
+ categories=cats,
+ maximize=self.maximize,
+ )
+ opt.run(self.n_iter)
+
+ return self._snap_to_grid(opt.optimum[0])
\ No newline at end of file
diff --git a/src/hyperactive/tests/test_lipo.py b/src/hyperactive/tests/test_lipo.py
new file mode 100644
index 00000000..1f7adfa8
--- /dev/null
+++ b/src/hyperactive/tests/test_lipo.py
@@ -0,0 +1,41 @@
+import numpy as np
+import pytest
+from hyperactive.opt.lipo import LIPOOptimizer
+
+
+def sphere(params):
+ return -(params["x"] ** 2 + params["y"] ** 2)
+
+
+def test_lipo_basic():
+ opt = LIPOOptimizer(
+ search_space={
+ "x": np.arange(-5, 5, 0.1),
+ "y": np.arange(-5, 5, 0.1),
+ },
+ n_iter=50,
+ experiment=sphere,
+ )
+ best = opt.solve()
+ assert "x" in best and "y" in best
+    assert abs(best["x"]) < 1.5 and abs(best["y"]) < 1.5  # should be near (0, 0)
+
+
+def test_lipo_categorical():
+ def fn(p): return 1.0 if p["kernel"] == "rbf" else 0.0
+ opt = LIPOOptimizer(
+ search_space={"kernel": ["linear", "rbf", "poly"]},
+ n_iter=20, experiment=fn,
+ )
+ best = opt.solve()
+ assert best["kernel"] == "rbf"
+
+
+def test_lipo_snap_to_grid():
+ def fn(p): return -abs(p["x"] - 3)
+ opt = LIPOOptimizer(
+ search_space={"x": np.array([1, 2, 3, 4, 5])},
+ n_iter=30, experiment=fn,
+ )
+ best = opt.solve()
+ assert best["x"] in [1, 2, 3, 4, 5] # must be on the grid
\ No newline at end of file
|