Commit 83384f1: "new rules"
1 parent bfa752d

File tree

14 files changed: +344 additions, 0 deletions


LICENSE

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 itbert

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
# hyperboost

A hyperparameter optimization library, suitable for research tasks as well as for practical use in real projects.

## ✅ Supported methods

| Method | Description |
|------------------------|----------|
| `BayesianOptimizer` | Bayesian optimization using a Gaussian Process |
| `EvolutionaryOptimizer` | A simple genetic algorithm (GA) |

## 📦 Installation
```bash
pip install hyperboost
```
## 🧪 Usage example

```python
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from hyperboost.space import SearchSpace
from hyperboost.space import Real, Integer, Categorical
from hyperboost.optimizers import BayesianOptimizer
from hyperboost.optimizers import EvolutionaryOptimizer

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

space = SearchSpace({
    "n_estimators": Integer(50, 300),
    "max_depth": Integer(3, 20),
    "criterion": Categorical(["gini", "entropy"]),
})

def objective(params):
    model = RandomForestClassifier(**params, random_state=42)
    model.fit(X_train, y_train)
    score = model.score(X_test, y_test)
    return -score  # negate accuracy, since the optimizers minimize

# Bayesian optimization
bo = BayesianOptimizer(space, objective)
best_bo = bo.optimize(n_iter=30)
print("Bayesian best:", best_bo)

# Evolutionary algorithm
ea = EvolutionaryOptimizer(space, objective)
best_ea = ea.optimize(population_size=20, generations=10)
print("Evolutionary best:", best_ea)
```

examples/sk.py

Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from hyperboost.space import SearchSpace
from hyperboost.space import Real, Integer, Categorical
from hyperboost.optimizers import BayesianOptimizer
from hyperboost.optimizers import EvolutionaryOptimizer

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

space = SearchSpace({
    "n_estimators": Integer(50, 300),
    "max_depth": Integer(3, 20),
    "criterion": Categorical(["gini", "entropy"]),
})

def objective(params):
    model = RandomForestClassifier(**params, random_state=42)
    model.fit(X_train, y_train)
    score = model.score(X_test, y_test)
    return -score  # negate accuracy, since the optimizers minimize

# Bayesian optimization
bo = BayesianOptimizer(space, objective)
best_bo = bo.optimize(n_iter=30)
print("Bayesian best:", best_bo)

# Evolutionary algorithm
ea = EvolutionaryOptimizer(space, objective)
best_ea = ea.optimize(population_size=20, generations=10)
print("Evolutionary best:", best_ea)

optimizers/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
from .bayesian import BayesianOptimizer
from .evo import EvolutionaryOptimizer

optimizers/bayesian.py

Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from scipy.optimize import minimize
from hyperboost.utils.visual import plot_convergence


class BayesianOptimizer:
    def __init__(self, space, objective_func, n_initial_points=5):
        self.model = None
        self.space = space
        self.objective_func = objective_func
        self.n_initial_points = n_initial_points
        self.X = []
        self.y = []
        self.history = []
        self._initialize()

    def _initialize(self):
        # Seed the surrogate model with a few random evaluations.
        initial_points = self.space.sample(self.n_initial_points)
        for point in initial_points:
            loss = self.objective_func(point)
            self.X.append(self.space.transform(point))
            self.y.append(loss)
            self.history.append(loss)

    def _acquisition(self, x, kappa=2.0):
        # Lower confidence bound: smaller is more promising; kappa trades
        # off exploitation (mu) against exploration (sigma).
        mu, sigma = self.model.predict([x], return_std=True)
        return mu[0] - kappa * sigma[0]

    def _optimize_acquisition(self):
        # Multi-start local optimization of the acquisition over the unit cube.
        best_x = None
        best_score = float('inf')
        bounds = [(0.0, 1.0)] * len(self.space.param_names)
        for _ in range(20):
            x0 = np.random.rand(len(bounds))
            res = minimize(self._acquisition, x0=x0, bounds=bounds)
            if res.fun < best_score:
                best_score = res.fun
                best_x = res.x
        return best_x

    def optimize(self, n_iter=30):
        kernel = Matern(nu=2.5)
        self.model = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=10)
        for _ in range(n_iter):
            self.model.fit(self.X, self.y)
            next_point = self._optimize_acquisition()
            params = self.space.inverse_transform(next_point)
            loss = self.objective_func(params)
            self.X.append(next_point)
            self.y.append(loss)
            self.history.append(loss)
        best_idx = np.argmin(self.y)
        plot_convergence(self.history)
        return self.space.inverse_transform(self.X[best_idx])
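The `_acquisition` here is a lower confidence bound (LCB), `mu - kappa * sigma`, minimized over the unit cube. A minimal standalone sketch of what `kappa` does, using made-up posterior means and standard deviations (not output of the optimizer):

```python
# Hypothetical GP posterior estimates for two candidate points.
candidates = {
    "well_explored": {"mu": 0.20, "sigma": 0.01},  # good mean, low uncertainty
    "unexplored":    {"mu": 0.35, "sigma": 0.20},  # worse mean, high uncertainty
}

# LCB = mu - kappa * sigma; the candidate with the lowest LCB is sampled next.
for kappa in (0.5, 2.0, 5.0):
    lcb = {name: c["mu"] - kappa * c["sigma"] for name, c in candidates.items()}
    print(f"kappa={kappa}: next sample -> {min(lcb, key=lcb.get)}")
# kappa=0.5 exploits the well-explored point; larger kappa favors the uncertain one.
```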

optimizers/evo.py

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
import random
import numpy as np
from hyperboost.utils.visual import plot_convergence


class EvolutionaryOptimizer:
    def __init__(self, space, objective_func):
        self.space = space
        self.objective_func = objective_func
        self.population = []
        self.history = []

    def _create_individual(self):
        return self.space.sample()[0]

    def _evaluate_population(self):
        # Score every individual and sort ascending (lower loss is better).
        scores = []
        for ind in self.population:
            score = self.objective_func(ind)
            scores.append((ind, score))
        return sorted(scores, key=lambda x: x[1])

    def _crossover(self, parent1, parent2):
        # Uniform crossover: each gene comes from either parent with p = 0.5.
        child = {}
        for name in self.space.param_names:
            if random.random() < 0.5:
                child[name] = parent1[name]
            else:
                child[name] = parent2[name]
        return child

    def _mutate(self, individual, mutation_rate=0.1):
        # Resample each gene independently with probability mutation_rate.
        mutated = individual.copy()
        for name in self.space.param_names:
            if random.random() < mutation_rate:
                param = self.space.space_dict[name]
                mutated[name] = param.sample()[0]
        return mutated

    def optimize(self, population_size=20, generations=10):
        self.population = self.space.sample(population_size)
        for gen in range(generations):
            evaluated = self._evaluate_population()
            next_gen = [evaluated[0][0]]  # elitism: carry over the current best
            while len(next_gen) < population_size:
                # Parents are drawn (with replacement) from the top five.
                p1, p2 = random.choices(evaluated[:5], k=2)
                child = self._crossover(p1[0], p2[0])
                child = self._mutate(child)
                next_gen.append(child)
            self.population = next_gen
            best_score = evaluated[0][1]
            self.history.append(best_score)
        plot_convergence(self.history)
        best = self._evaluate_population()[0][0]
        return best
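As a quick sanity check of the GA loop that avoids scikit-learn entirely, a sketch minimizing a toy quadratic through the same public API as the README (the function and bounds are illustrative, not part of the library):

```python
from hyperboost.space import SearchSpace, Real
from hyperboost.optimizers import EvolutionaryOptimizer

# Toy objective with a known optimum at x=3, y=-1.
space = SearchSpace({
    "x": Real(-10.0, 10.0),
    "y": Real(-10.0, 10.0),
})

def objective(params):
    return (params["x"] - 3.0) ** 2 + (params["y"] + 1.0) ** 2

ea = EvolutionaryOptimizer(space, objective)
best = ea.optimize(population_size=30, generations=20)
print("best params:", best)  # should land near x=3, y=-1
```

Parents come only from the top five, so selection pressure is high; the 10% per-gene mutation in `_mutate` is what keeps the population diverse.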

parallel/evaluate.py

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
from concurrent.futures import ThreadPoolExecutor


def eval(func, points):
    # NOTE: the name shadows the built-in eval(); kept to preserve the module API.
    try:
        with ThreadPoolExecutor() as executor:
            results = list(executor.map(func, points))
        return results
    except AttributeError as exc:
        raise AttributeError("Your model does not have a parallel mode") from exc

space/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
from .types import Real, Integer, Categorical
from .search import SearchSpace

space/search.py

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
import numpy as np


class SearchSpace:
    def __init__(self, space_dict):
        self.space_dict = space_dict
        self.param_names = list(space_dict.keys())
        self.param_objs = list(space_dict.values())

    def sample(self, n=1):
        # Draw n random points, each a dict of parameter name -> value.
        samples = []
        for _ in range(n):
            point = {}
            for name, param in zip(self.param_names, self.param_objs):
                point[name] = param.sample()[0]
            samples.append(point)
        return samples

    def transform(self, params):
        # Map a parameter dict to a vector in the unit cube [0, 1]^d.
        vec = []
        for name in self.param_names:
            val = params[name]
            param_obj = self.space_dict[name]
            vec.append(param_obj.transform(val))
        return np.array(vec)

    def inverse_transform(self, vector):
        # Map a unit-cube vector back to a parameter dict.
        params = {}
        for i, name in enumerate(self.param_names):
            param_obj = self.space_dict[name]
            params[name] = param_obj.inverse_transform(vector[i])
        return params
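The two transforms are what connect `BayesianOptimizer`, which searches the unit cube, to the dict-based objectives; a round-trip sketch using the classes above:

```python
from hyperboost.space import SearchSpace, Integer, Categorical

space = SearchSpace({
    "max_depth": Integer(3, 20),
    "criterion": Categorical(["gini", "entropy"]),
})

point = space.sample(1)[0]    # e.g. {"max_depth": 7, "criterion": "gini"}
vec = space.transform(point)  # e.g. array([0.2353, 0.0]) in [0, 1]^2
# Rounding in Integer/Categorical makes the round trip exact here.
assert space.inverse_transform(vec) == point
```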

space/types.py

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
import numpy as np


class Real:
    def __init__(self, low, high):
        self.low = low
        self.high = high

    def sample(self, n=1):
        return np.random.uniform(self.low, self.high, size=n)

    def transform(self, value):
        # Scale to [0, 1].
        return (value - self.low) / (self.high - self.low)

    def inverse_transform(self, value):
        return value * (self.high - self.low) + self.low


class Integer:
    def __init__(self, low, high):
        self.low = low
        self.high = high

    def sample(self, n=1):
        # Inclusive bounds: randint's upper bound is exclusive, hence high + 1.
        return np.random.randint(self.low, self.high + 1, size=n)

    def transform(self, value):
        return (value - self.low) / (self.high - self.low)

    def inverse_transform(self, value):
        return int(round(value * (self.high - self.low) + self.low))


class Categorical:
    def __init__(self, choices):
        self.choices = choices

    def sample(self, n=1):
        return [np.random.choice(self.choices) for _ in range(n)]

    def transform(self, value):
        # Encode a choice as its normalized index.
        return self.choices.index(value) / (len(self.choices) - 1)

    def inverse_transform(self, value):
        idx = round(value * (len(self.choices) - 1))
        return self.choices[int(idx)]
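A few hand-checked values for the encodings above (note that `Categorical.transform` divides by `len(choices) - 1`, so it assumes at least two choices):

```python
from hyperboost.space import Real, Integer, Categorical

r = Real(0.0, 10.0)
assert r.transform(2.5) == 0.25 and r.inverse_transform(0.25) == 2.5

i = Integer(3, 20)
assert i.transform(3) == 0.0 and i.transform(20) == 1.0
assert i.inverse_transform(0.5) == 12   # round(0.5 * 17 + 3) = round(11.5) = 12

c = Categorical(["gini", "entropy"])
assert c.transform("entropy") == 1.0
assert c.inverse_transform(0.6) == "entropy"  # round(0.6 * 1) = 1 -> choices[1]
```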
