-
Notifications
You must be signed in to change notification settings - Fork 31
Expand file tree
/
Copy pathtutorial_aposmm.py
More file actions
48 lines (39 loc) · 1.7 KB
/
tutorial_aposmm.py
File metadata and controls
48 lines (39 loc) · 1.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
"""Run persistent APOSMM with a SciPy local optimizer on the six-hump camel function.

Tutorial script: a persistent APOSMM generator proposes points (random sample
followed by parallel Nelder-Mead runs), workers evaluate the six-hump camel
simulation, and the manager prints the local minima found.
"""
import numpy as np
from tutorial_six_hump_camel import six_hump_camel

import libensemble.gen_funcs
from libensemble.alloc_funcs.persistent_aposmm_alloc import persistent_aposmm_alloc
from libensemble.gen_funcs.persistent_aposmm import aposmm
from libensemble.libE import libE
from libensemble.tools import parse_args

# Select the local-optimization backend APOSMM should load.
libensemble.gen_funcs.rc.aposmm_optimizers = "scipy"

nworkers, is_manager, libE_specs, _ = parse_args()

# Simulator: evaluate f(x) = six-hump camel at each generated 'x'.
sim_specs = {
    "sim_f": six_hump_camel,
    "in": ["x"],
    "out": [("f", float)],
}

# History-array fields written by the generator.
generator_output = [
    ("x", float, 2),          # proposed points
    ("x_on_cube", float, 2),  # same points scaled to the unit cube
    ("sim_id", int),          # ordering IDs for simulations
    ("local_min", bool),      # flagged when a point is a local minimum
    ("local_pt", bool),       # flagged when a point came from a local-opt run
]

# Search domain bounds for the 2-D problem.
domain_lower = np.array([-2, -1])
domain_upper = np.array([2, 1])

# Generator: persistent APOSMM, fed back the fields listed in 'persis_in'.
gen_specs = {
    "gen_f": aposmm,
    "persis_in": ["x", "f", "x_on_cube", "sim_id", "local_min", "local_pt"],
    "out": generator_output,
    "user": {
        "initial_sample_size": 100,        # random points before local opt starts
        "localopt_method": "scipy_Nelder-Mead",
        "opt_return_codes": [0],           # success codes for this method
        "max_active_runs": 6,              # local-opt runs allowed in parallel
        "lb": domain_lower,
        "ub": domain_upper,
    },
}

alloc_specs = {"alloc_f": persistent_aposmm_alloc}
exit_criteria = {"sim_max": 2000}  # stop after 2000 simulation evaluations

H, persis_info, flag = libE(
    sim_specs, gen_specs, exit_criteria, alloc_specs=alloc_specs, libE_specs=libE_specs
)

# Only the manager process holds the full history; report the minima it found.
if is_manager:
    print("Minima:", H[np.where(H["local_min"])]["x"])