@@ -532,7 +532,9 @@ def gen_candidates_torch(
         optimizer (Optimizer): The pytorch optimizer to use to perform
             candidate search.
         options: Options used to control the optimization. Includes
-            maxiter: Maximum number of iterations
+            optimizer_options: Dict of additional options to pass to the optimizer
+                (e.g. lr, weight_decay).
+            stopping_criterion_options: Dict of options for the stopping criterion.
         callback: A callback function accepting the current iteration, loss,
             and gradients as arguments. This function is executed after computing
             the loss and gradients, but before calling the optimizer.
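For illustration, a usage sketch of the new options layout; acq_function and Xinit are placeholder names, and the scalar bounds are assumptions, not part of this diff:

# Hypothetical call site; acq_function and Xinit are placeholders.
batch_candidates, batch_acq_values = gen_candidates_torch(
    initial_conditions=Xinit,
    acquisition_function=acq_function,
    lower_bounds=0.0,
    upper_bounds=1.0,
    options={
        # Forwarded as keyword arguments to the torch optimizer.
        "optimizer_options": {"lr": 0.01, "weight_decay": 1e-4},
        # Forwarded to ExpMAStoppingCriterion.
        "stopping_criterion_options": {"maxiter": 150},
    },
)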
@@ -571,7 +573,6 @@ def gen_candidates_torch(
     # the 1st order optimizers implemented in this method.
     # Here, it does not matter whether one combines multiple optimizations into
     # one or not.
-    options.pop("max_optimization_problem_aggregation_size", None)
     _clamp = partial(columnwise_clamp, lower=lower_bounds, upper=upper_bounds)
     clamped_candidates = _clamp(initial_conditions)
     if fixed_features:
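For reference, the _clamp partial above simply bounds every candidate coordinate; a rough plain-torch stand-in for columnwise_clamp (a sketch, not the library implementation) looks like:

import torch

def clamp_sketch(X, lower=None, upper=None):
    # Clamp X elementwise to [lower, upper]; None leaves that side unbounded.
    # Scalars or tensors broadcastable against X's last dimension both work.
    if lower is not None:
        X = torch.max(X, torch.as_tensor(lower, dtype=X.dtype, device=X.device))
    if upper is not None:
        X = torch.min(X, torch.as_tensor(upper, dtype=X.dtype, device=X.device))
    return X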
@@ -580,11 +581,30 @@ def gen_candidates_torch(
             [i for i in range(clamped_candidates.shape[-1]) if i not in fixed_features],
         ]
     clamped_candidates = clamped_candidates.requires_grad_(True)
-    _optimizer = optimizer(params=[clamped_candidates], lr=options.get("lr", 0.025))
+
+    # Extract optimizer-specific options from the options dict
+    optimizer_options = options.get("optimizer_options", {}).copy()
+    stopping_criterion_options = options.get("stopping_criterion_options", {}).copy()
+
+    # Backward compatibility: if the old 'maxiter' parameter is passed, move it
+    # to stopping_criterion_options with a deprecation warning
+    if "maxiter" in options:
+        warnings.warn(
+            "Passing 'maxiter' directly in options is deprecated. "
+            "Please use options['stopping_criterion_options']['maxiter'] instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        # For backward compatibility, pass to stopping_criterion_options
+        if "maxiter" not in stopping_criterion_options:
+            stopping_criterion_options["maxiter"] = options["maxiter"]
+
+    optimizer_options.setdefault("lr", 0.025)
+    _optimizer = optimizer(params=[clamped_candidates], **optimizer_options)

     i = 0
     stop = False
-    stopping_criterion = ExpMAStoppingCriterion(**options)
+    stopping_criterion = ExpMAStoppingCriterion(**stopping_criterion_options)
     while not stop:
         i += 1
         with torch.no_grad():
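Taken together, the extraction and deprecation shim above can be read as a standalone helper; a minimal sketch (the name _split_options is invented here for illustration):

import warnings

def _split_options(options):
    # Copy the nested dicts so the caller's options dict is not mutated.
    optimizer_options = options.get("optimizer_options", {}).copy()
    stopping_criterion_options = options.get("stopping_criterion_options", {}).copy()
    if "maxiter" in options:
        warnings.warn(
            "Passing 'maxiter' directly in options is deprecated. "
            "Please use options['stopping_criterion_options']['maxiter'] instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        # An explicit nested maxiter wins over the legacy top-level one.
        stopping_criterion_options.setdefault("maxiter", options["maxiter"])
    return optimizer_options, stopping_criterion_options

# Legacy call sites keep working, but emit a DeprecationWarning:
opt_opts, stop_opts = _split_options({"maxiter": 100, "optimizer_options": {"lr": 0.05}})
assert opt_opts == {"lr": 0.05} and stop_opts == {"maxiter": 100}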