
Commit a26524e

Merge pull request #1562 from Libensemble/feature/searching_over_rk_example
Example showing a search over rk
2 parents 7f789b9 + 00bab2f commit a26524e

2 files changed: 126 additions & 1 deletion

libensemble/gen_funcs/persistent_aposmm.py

Lines changed: 6 additions & 1 deletion
@@ -25,7 +25,7 @@
 def cdist(XA, XB, metric="euclidean"):
     """Compute the pairwise Euclidean distances"""

-    # Just so dont have to change the call
+    # Just so we don't have to change the call
     if metric != "euclidean":
         raise ValueError("Only 'euclidean' metric is supported in this implementation.")

@@ -120,6 +120,11 @@ def aposmm(H, persis_info, gen_specs, libE_info):
     `test_persistent_aposmm_with_grad <https://github.com/Libensemble/libensemble/blob/develop/libensemble/tests/regression_tests/test_persistent_aposmm_with_grad.py>`_
     for an example where past function values are given to libEnsemble/APOSMM.

+    .. seealso::
+
+        `test_aposmm_starting_point_finder <https://github.com/Libensemble/libensemble/blob/develop/libensemble/tests/unit_tests/test_aposmm_starting_point_finder.py>`_
+        for an example where the APOSMM r_k radius logic is adjusted to produce a certain number of localopt starting points.
+
     """
     """
     Description of intermediate variables in aposmm:
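
Aside (editorial, not part of the commit): the r_k referenced above is the radius APOSMM uses to decide where to start local optimization runs. By default APOSMM derives it from the sample size rather than from a target number of starting points, which is why the new example adjusts it directly. A minimal sketch of that default rule, assuming the rk_const * (log(n_s) / n_s) ** (1 / n) form described in the APOSMM literature; calc_rk_sketch is an illustrative name, not the library's API:

import numpy as np

def calc_rk_sketch(n, n_s, rk_const):
    # Assumed rule-of-thumb: the radius shrinks as the sample count n_s
    # grows, scaled by the problem dimension n.
    return rk_const * (np.log(n_s) / n_s) ** (1.0 / n)

print(calc_rk_sketch(2, 1000, 0.5))  # ~0.042 for a 2-D problem with 1000 samples

With a fixed formula like this, the number of starting points simply falls out of the sample; the bisection in the new file below inverts the relationship to hit a requested count.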
libensemble/tests/unit_tests/test_aposmm_starting_point_finder.py (new file)

Lines changed: 120 additions & 0 deletions
import numpy as np

from libensemble.gen_funcs.persistent_aposmm import (
    initialize_APOSMM,
    update_history_dist,
    decide_where_to_start_localopt,
)
from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func
from libensemble.tests.regression_tests.support import six_hump_camel_minima as known_minima


def setup_history_and_find_rk(n_s, num_to_start, lb, ub, f_vals, x_points):
    """
    Populate the history array H with n_s points and bisect over r_k to find a value
    producing num_to_start local optimization starting points via decide_where_to_start_localopt.

    Parameters:
    - n_s (int): Number of initial sample points.
    - num_to_start (int): Desired number of starting points for local optimization.
    - lb, ub (np.ndarray): Lower and upper bounds of the domain.
    - f_vals (np.ndarray): Function values at each point in x_points.
    - x_points (np.ndarray): n_s-by-d array of sample points.

    Returns:
    - H (np structured array): Updated history array.
    - rk_final (float): Value of r_k yielding num_to_start local optimization starts.
    - inds_to_start (list): Indices in H at which to start local optimization.
    """

    assert x_points.shape[0] == n_s
    assert f_vals.shape[0] == n_s

    n = x_points.shape[1]

    H = np.zeros(
        n_s,
        dtype=[
            ("sim_id", int),
            ("x", float, n),
            ("x_on_cube", float, n),
            ("f", float),
            ("local_pt", bool),
            ("sim_ended", bool),
        ],
    )

    # Set up the history array
    for i in range(n_s):
        H[i]["x"] = x_points[i]
        H[i]["sim_id"] = i
        H[i]["x_on_cube"] = (x_points[i] - lb) / (ub - lb)
        H[i]["f"] = f_vals[i]
        H[i]["local_pt"] = False
        H[i]["sim_ended"] = True  # Ensure the point is considered by the distance function

    user_specs = {"lb": lb, "ub": ub, "initial_sample_size": n_s, "localopt_method": None}
    local_H = initialize_APOSMM(H, user_specs, {"comm": None})[-1]
    local_H = local_H[:n_s]  # Use only the required number of entries

    update_history_dist(local_H, n)

    # Bisect to find an r_k that yields exactly num_to_start starting points
    r_low, r_high = 1e-5, 2.0  # Conservative initial bounds
    rk_final = None
    tol = 1e-5

    while r_high - r_low > tol:
        r_mid = (r_low + r_high) / 2.0
        inds_to_start = decide_where_to_start_localopt(local_H, n, n_s, r_mid)

        if len(inds_to_start) < num_to_start:
            r_high = r_mid  # Radius too large: too few starting points, so search lower
        elif len(inds_to_start) > num_to_start:
            r_low = r_mid  # Radius too small: too many starting points, so search higher
        else:
            rk_final = r_mid
            break

    # Final decision (in case we didn't hit num_to_start exactly)
    if rk_final is None:
        rk_final = (r_low + r_high) / 2.0
        inds_to_start = decide_where_to_start_localopt(local_H, n, n_s, rk_final)

    return local_H, rk_final, inds_to_start


if __name__ == "__main__":

    # Define the domain bounds
    lb = np.array([-3.0, -2.0])
    ub = np.array([3.0, 2.0])
    dim = len(lb)

    # Number of sample points and desired number of starting points
    num_samples = 1000
    num_to_start = 6

    # Generate random sample points uniformly in the [lb, ub] box
    x_points = lb + (ub - lb) * np.random.uniform(size=(num_samples, dim))

    # Evaluate the six-hump camel function at each point
    f_vals = np.array([six_hump_camel_func(x) for x in x_points])

    # Call the history setup and bisection function
    H, rk_final, inds_to_start = setup_history_and_find_rk(num_samples, num_to_start, lb, ub, f_vals, x_points)

    assert len(inds_to_start) == num_to_start, f"Found {len(inds_to_start)} starting points instead of {num_to_start}"

    # Sort both point sets lexicographically so they can be compared row by row
    starting_pts = H["x"][inds_to_start]
    sorted_starting = starting_pts[np.lexsort(starting_pts.T[::-1])]
    sorted_known = known_minima[np.lexsort(known_minima.T[::-1])]

    print(
        f"For this problem, we know the minima.\n"
        f"The chosen starting points:\n{sorted_starting}\n"
        f"should be close to the known minima:\n{sorted_known}"
    )

    # Output results
    print(f"Chosen r_k: {rk_final:.6f}")
    print(f"Indices to start local optimization (num_to_start={num_to_start}): {inds_to_start}")
