@@ -28,11 +28,12 @@ def sentry_init_with_reset(sentry_init):
     _processed_integrations.discard("spark")


-@pytest.fixture(scope="function")
+@pytest.fixture(scope="session")
 def create_spark_context():
     conf = SparkConf().set("spark.driver.bindAddress", "127.0.0.1")
-    yield lambda: SparkContext(conf=conf, appName="Testing123")
-    SparkContext._active_spark_context.stop()
+    sc = SparkContext(conf=conf, appName="Testing123")
+    yield lambda: sc
+    sc.stop()


 def test_set_app_properties(create_spark_context):
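
For context, the `scope="session"` fixture creates the SparkContext once and stops it only after the last test, while still yielding a callable so existing `create_spark_context()` call sites keep working; every call now returns the same shared context. A minimal sketch of this pattern under plain pytest fixture semantics, with hypothetical names (`FakeResource`, `expensive_resource`) not taken from this repo:

import pytest


class FakeResource:
    """Stand-in for an expensive, shareable object such as a SparkContext."""

    def __init__(self):
        self.stopped = False

    def stop(self):
        self.stopped = True


@pytest.fixture(scope="session")
def expensive_resource():
    # Built once for the whole test session, not once per test.
    resource = FakeResource()
    # Yield a callable so call sites written as `fixture()` stay unchanged.
    yield lambda: resource
    # Teardown runs once, after the final test of the session.
    resource.stop()


def test_same_instance_every_call(expensive_resource):
    assert expensive_resource() is expensive_resource()


def test_still_running_mid_session(expensive_resource):
    # The shared object is only stopped after the session ends.
    assert not expensive_resource().stopped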
@@ -61,12 +62,18 @@ def test_start_sentry_listener(create_spark_context):
 def test_initialize_spark_integration_before_spark_context_init(
     mock_patch_spark_context_init,
     sentry_init_with_reset,
-    create_spark_context,
 ):
-    sentry_init_with_reset()
-    create_spark_context()
-
-    mock_patch_spark_context_init.assert_called_once()
+    # Since the same SparkContext is shared for the whole session,
+    # we clear it for the duration of this test.
+    original_context = SparkContext._active_spark_context
+    SparkContext._active_spark_context = None
+
+    try:
+        sentry_init_with_reset()
+        mock_patch_spark_context_init.assert_called_once()
+    finally:
+        # Restore the original context.
+        SparkContext._active_spark_context = original_context


 @patch("sentry_sdk.integrations.spark.spark_driver._activate_integration")
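
The `try`/`finally` above is needed because `SparkContext._active_spark_context` is process-global state that the session-scoped fixture leaves populated; the test saves it, clears it so the code under test sees no active context, and restores it even when an assertion fails. A minimal sketch of this save/clear/restore pattern against a hypothetical singleton (`Registry._active`), independent of PySpark:

class Registry:
    # Process-global singleton slot, analogous to SparkContext._active_spark_context.
    _active = None


def test_runs_as_if_no_singleton_exists():
    # Save whatever earlier session-scoped setup left behind...
    original = Registry._active
    # ...and pretend no instance exists for the duration of this test.
    Registry._active = None

    try:
        # The code under test now observes a clean slate.
        assert Registry._active is None
    finally:
        # Restore the shared instance so later tests are unaffected,
        # even if the assertion above failed.
        Registry._active = original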