-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtest_ralg.py
More file actions
185 lines (153 loc) · 5.68 KB
/
test_ralg.py
File metadata and controls
185 lines (153 loc) · 5.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
#!/usr/bin/env python
"""
Test script to verify ralg solver works with NumPy 2.x
"""
import sys
import os
import numpy as np

# Make the bundled OpenOpt package importable from the working directory.
sys.path.insert(0, 'OpenOpt')

# Environment banner so failures can be tied to exact library versions.
_rule = "=" * 60
print(_rule)
print("Testing ralg solver with NumPy 2.x")
print(f"NumPy version: {np.__version__}")
print(f"Python version: {sys.version}")
print(_rule)
def test_simple_unconstrained():
    """Test simple unconstrained optimization"""
    print("\n1. Testing simple unconstrained problem...")
    try:
        from openopt import NLP

        # Quadratic bowl whose unique minimum sits at (1, 2).
        def objective(x):
            return (x[0] - 1) ** 2 + (x[1] - 2) ** 2

        start = np.zeros(2)
        problem = NLP(objective, start, iprint=-1, maxIter=1000)
        result = problem.solve('ralg')

        # Distance from the known optimum decides pass/fail.
        deviation = np.linalg.norm(result.xf - np.array([1, 2]))
        passed = deviation < 1e-3
        print(f" Solution: {result.xf}")
        print(f" Expected: [1, 2]")
        print(f" Error: {deviation:.6f}")
        print(f" {'✓ PASSED' if passed else '✗ FAILED'}")
        return passed
    except Exception as e:
        print(f" ✗ FAILED: {e}")
        import traceback
        traceback.print_exc()
        return False
def test_with_bounds():
    """Test with box constraints"""
    print("\n2. Testing problem with bounds...")
    try:
        from openopt import NLP

        # Same quadratic bowl, but boxed into [0.5, 1.5] per coordinate.
        def objective(x):
            return (x[0] - 1) ** 2 + (x[1] - 2) ** 2

        start = np.zeros(2)
        lower = np.array([0.5, 0.5])
        upper = np.array([1.5, 1.5])
        problem = NLP(objective, start, lb=lower, ub=upper, iprint=-1, maxIter=1000)
        result = problem.solve('ralg')

        # The unconstrained optimum (1, 2) gets clipped to the box: (1, 1.5).
        expected = np.array([1.0, 1.5])  # x[1] is clipped to 1.5
        deviation = np.linalg.norm(result.xf - expected)
        tol = 1e-5
        inside_box = np.all(result.xf >= lower - tol) and np.all(result.xf <= upper + tol)
        passed = deviation < 0.01 and inside_box
        print(f" Solution: {result.xf}")
        print(f" Expected: {expected}")
        print(f" Error: {deviation:.6f}")
        print(f" Bounds satisfied: {inside_box}")
        print(f" {'✓ PASSED' if passed else '✗ FAILED'}")
        return passed
    except Exception as e:
        print(f" ✗ FAILED: {e}")
        import traceback
        traceback.print_exc()
        return False
def test_with_linear_constraints():
    """Test with linear constraints"""
    print("\n3. Testing problem with linear constraints...")
    try:
        from openopt import NLP

        # Minimize (x-2)^2 + (y-2)^2 subject to x + y <= 2; by symmetry
        # the optimum lies on the boundary at (1, 1).
        def objective(x):
            return (x[0] - 2) ** 2 + (x[1] - 2) ** 2

        start = np.zeros(2)
        # Linear inequality constraint encoded as A @ x <= b.
        A = np.array([[1, 1]])
        b = np.array([2])
        problem = NLP(objective, start, A=A, b=b, iprint=-1, maxIter=1000)
        result = problem.solve('ralg')

        expected = np.array([1.0, 1.0])
        deviation = np.linalg.norm(result.xf - expected)
        feasible = result.xf[0] + result.xf[1] <= 2 + 1e-6
        passed = deviation < 0.1 and feasible
        print(f" Solution: {result.xf}")
        print(f" Expected: {expected}")
        print(f" Error: {deviation:.6f}")
        print(f" Constraint x+y={result.xf[0]+result.xf[1]:.4f} <= 2: {feasible}")
        print(f" {'✓ PASSED' if passed else '✗ FAILED'}")
        return passed
    except Exception as e:
        print(f" ✗ FAILED: {e}")
        import traceback
        traceback.print_exc()
        return False
def test_rosenbrock():
    """Test on Rosenbrock function"""
    print("\n4. Testing Rosenbrock function...")
    try:
        from openopt import NLP

        # Classic banana-valley benchmark; global minimum at (1, 1).
        def rosenbrock(x):
            return (1 - x[0]) ** 2 + 100 * (x[1] - x[0] ** 2) ** 2

        start = np.array([-1.2, 1.0])  # standard hard starting point
        problem = NLP(rosenbrock, start, iprint=-1, maxIter=10000)
        result = problem.solve('ralg')

        expected = np.array([1.0, 1.0])
        deviation = np.linalg.norm(result.xf - expected)
        passed = deviation < 0.1
        print(f" Solution: {result.xf}")
        print(f" Expected: {expected}")
        print(f" Error: {deviation:.6f}")
        print(f" Function value: {result.ff:.6f}")
        print(f" {'✓ PASSED' if passed else '✗ FAILED'}")
        return passed
    except Exception as e:
        print(f" ✗ FAILED: {e}")
        import traceback
        traceback.print_exc()
        return False
def main():
    """Run all tests"""
    suite = [
        ("Simple unconstrained", test_simple_unconstrained),
        ("With bounds", test_with_bounds),
        ("With linear constraints", test_with_linear_constraints),
        ("Rosenbrock", test_rosenbrock)
    ]
    # Run each test once, keyed by its display label.
    results = {label: runner() for label, runner in suite}

    # Summary
    rule = "=" * 60
    print("\n" + rule)
    print("SUMMARY")
    print(rule)
    passed = sum(results.values())
    total = len(results)
    print(f"Tests passed: {passed}/{total}")
    for label, ok in results.items():
        print(f" {'✓' if ok else '✗'} {label}")

    if passed == total:
        print("\n✓ All tests passed! ralg solver works with NumPy 2.x")
    elif passed > 0:
        print(f"\n⚠ Partial success: {passed}/{total} tests passed")
        print(" ralg solver partially works but may have some issues")
    else:
        print("\n✗ All tests failed - ralg solver has significant issues")
    # True only when every single test succeeded.
    return passed == total
if __name__ == "__main__":
success = main()
sys.exit(0 if success else 1)