Skip to content

Commit 29766ae

Browse files
authored
Add files via upload
1 parent ad15ec5 commit 29766ae

1 file changed

Lines changed: 4 additions & 4 deletions

File tree

multioptpy/ieip.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1264,6 +1264,8 @@ def __init__(self, config):
12641264
self.element_number_list = None
12651265
self.ADDths = [] # List to store ADD theta classes
12661266
self.optimized_structures = {} # Dictionary to store optimized structures by ADD ID
1267+
self.max_iterations = 100
1268+
self.gtol = 1e-3
12671269

12681270
def get_unit_conversion(self):
12691271
"""Return bohr to angstrom conversion factor"""
@@ -1625,8 +1627,6 @@ def minimizeTh_LBFGS(self, thetalist_init, f, grad, eqpoint, IOEsphereA):
16251627

16261628
# L-BFGS parameters
16271629
m = min(10, n_dims) # Memory size for L-BFGS
1628-
max_iterations = 100
1629-
gtol = 1.0e-4 # Gradient tolerance for convergence
16301630

16311631
# Keep track of the best solution found
16321632
best_thetalist = copy.deepcopy(thetalist)
@@ -1656,9 +1656,9 @@ def minimizeTh_LBFGS(self, thetalist_init, f, grad, eqpoint, IOEsphereA):
16561656
best_gradient_norm = gradient_norm
16571657

16581658
# Iterate until convergence or max iterations
1659-
for iteration in range(max_iterations):
1659+
for iteration in range(self.max_iterations):
16601660
# Check for convergence
1661-
if gradient_norm < gtol:
1661+
if gradient_norm < self.gtol:
16621662
print(f"Optimization converged after {iteration} iterations with gradient norm: {gradient_norm:.6f}")
16631663
return thetalist
16641664

0 commit comments

Comments (0)