|
| 1 | +import numpy as np |
| 2 | +from libensemble.gen_funcs.persistent_aposmm import ( |
| 3 | + initialize_APOSMM, |
| 4 | + update_history_dist, |
| 5 | + decide_where_to_start_localopt |
| 6 | +) |
| 7 | +from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func |
| 8 | + |
| 9 | + |
def setup_history_and_find_rk(n_s, num_to_start, lb, ub, f_vals, x_points):
    """
    Build an APOSMM history from sample points, then bisect on r_k until
    decide_where_to_start_localopt reports num_to_start starting indices.

    Parameters
    ----------
    n_s : int
        Number of initial sample points.
    num_to_start : int
        Target number of local-optimization start points.
    lb, ub : np.ndarray
        Lower and upper bounds of the domain.
    f_vals : np.ndarray
        Objective values, one per sample point.
    x_points : np.ndarray
        (n_s, d) array of sample locations.

    Returns
    -------
    local_H : np.ndarray (structured)
        History array after distance updates.
    rk_final : float
        The r_k value chosen by the bisection.
    inds_to_start : list
        Indices in local_H at which to start local optimization.
    """
    assert x_points.shape[0] == n_s
    assert f_vals.shape[0] == n_s

    dim = x_points.shape[1]

    dtype = [
        ("sim_id", int),
        ("x", float, dim),
        ("x_on_cube", float, dim),
        ("f", float),
        ("local_pt", bool),
        ("sim_ended", bool),
    ]
    H = np.zeros(n_s, dtype=dtype)

    # Populate the history in one vectorized pass (same values as a
    # per-row loop would assign).
    H["sim_id"] = np.arange(n_s)
    H["x"] = x_points
    H["x_on_cube"] = (x_points - lb) / (ub - lb)
    H["f"] = f_vals
    H["local_pt"] = False
    H["sim_ended"] = True  # mark evaluated so the distance update considers every point

    user_specs = {"lb": lb, "ub": ub, "initial_sample_size": n_s, "localopt_method": None}
    local_H = initialize_APOSMM(H, user_specs, {"comm": None})[-1]
    local_H = local_H[:n_s]  # keep only the populated entries

    update_history_dist(local_H, dim)

    # Bisect on r_k: a larger radius yields fewer start points, so shrink
    # the upper bound when we see too few and raise the lower bound when
    # we see too many.
    lo, hi = 1e-5, 2.0  # conservative initial bracket
    tol = 1e-5
    rk_final = None

    while hi - lo > tol:
        mid = (lo + hi) / 2.0
        inds_to_start = decide_where_to_start_localopt(local_H, dim, n_s, mid)
        found = len(inds_to_start)

        if found == num_to_start:
            rk_final = mid
            break
        if found < num_to_start:
            hi = mid
        else:
            lo = mid

    if rk_final is None:
        # Bracket collapsed without an exact hit; settle on the midpoint.
        rk_final = (lo + hi) / 2.0
        inds_to_start = decide_where_to_start_localopt(local_H, dim, n_s, rk_final)

    return local_H, rk_final, inds_to_start
| 72 | + |
| 73 | + |
if __name__ == "__main__":

    # Domain bounds for the six-hump camel problem
    lb = np.array([-3.0, -2.0])
    ub = np.array([3.0, 2.0])
    dim = len(lb)

    num_samples = 100  # size of the initial uniform sample
    num_to_start = 6   # desired number of local-opt starting points

    # Draw uniform random samples inside the [lb, ub] box
    x_points = lb + (ub - lb) * np.random.uniform(size=(num_samples, dim))

    # Evaluate the objective at every sample point
    f_vals = np.array([six_hump_camel_func(x) for x in x_points])

    # Build the history and bisect for a suitable r_k
    H, rk_final, inds_to_start = setup_history_and_find_rk(
        num_samples, num_to_start, lb, ub, f_vals, x_points
    )

    assert len(inds_to_start) == num_to_start, (
        f"Didn't find correct number of starting points. "
        f"Found {len(inds_to_start)} instead of {num_to_start}"
    )

    # Report the chosen radius and the selected start indices
    print(f"Chosen r_k: {rk_final:.6f}")
    print(f"Indices to start local optimization (num_to_start={num_to_start}): {inds_to_start}")
0 commit comments