@@ -469,6 +469,9 @@ def test_learner_performance_is_invariant_under_scaling(
     """
     # for now we just scale X and Y by random factors
     f = generate_random_parametrization(f)
+    if learner_type is AverageLearner1D:
+        # no noise for AverageLearner1D to make it deterministic
+        f = ft.partial(f, sigma=0)
 
     control_kwargs = dict(learner_kwargs)
     control = learner_type(f, **control_kwargs)
@@ -478,7 +481,14 @@ def test_learner_performance_is_invariant_under_scaling(
 
     l_kwargs = dict(learner_kwargs)
     l_kwargs["bounds"] = xscale * np.array(l_kwargs["bounds"])
-    learner = learner_type(lambda x: yscale * f(np.array(x) / xscale), **l_kwargs)
+
+    def scale_x(x):
+        if isinstance(learner, AverageLearner1D):
+            seed, x = x
+            return (seed, x / xscale)
+        return np.array(x) / xscale
+
+    learner = learner_type(lambda x: yscale * f(scale_x(x)), **l_kwargs)
 
     if learner_type in [Learner1D, LearnerND, AverageLearner1D]:
         learner._recompute_losses_factor = 1
@@ -497,7 +507,7 @@ def test_learner_performance_is_invariant_under_scaling(
     learner.tell_many(xs, [learner.function(x) for x in xs])
 
     # Check whether the points returned are the same
-    xs_unscaled = np.array(xs) / xscale
+    xs_unscaled = [scale_x(x) for x in xs]
     assert np.allclose(xs_unscaled, cxs)
 
     # Check if the losses are close
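Note for reviewers: `AverageLearner1D` points are `(seed, x)` tuples (hence the `seed, x = x` unpacking above), while the other learners receive bare coordinates. A minimal standalone sketch of the same dispatch, with the closure's `isinstance(learner, ...)` check swapped for an explicit flag and a hypothetical `xscale`, just to illustrate the two point formats:

```python
import numpy as np

xscale = 1000.0  # hypothetical scale factor, standing in for the test fixture

def scale_x(x, *, is_average_learner=False):
    """Map a point of the scaled learner back to the control learner's domain.

    AverageLearner1D points are (seed, x) tuples, so only the x component
    is rescaled and the seed is passed through untouched.
    """
    if is_average_learner:
        seed, x = x
        return (seed, x / xscale)
    return np.array(x) / xscale

# plain Learner1D/LearnerND-style point
assert np.allclose(scale_x(3000.0), 3.0)
# AverageLearner1D-style point: the seed survives unchanged
assert scale_x((7, 3000.0), is_average_learner=True) == (7, 3.0)
```

In the test itself `scale_x` closes over `learner`, which is only defined on the following line; this works as long as the learner does not evaluate its function during construction, which appears to be the case for these learner types.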