Skip to content

Commit c639f1c

Browse files
authored
ci: Fix pyspark test suite (#4382)
- pyspark was failing to bind to an address - aws_lambda fixed itself 🪄
1 parent eee4cac commit c639f1c

File tree

1 file changed: +4 additions, -3 deletions

tests/integrations/spark/test_spark.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
 )
 from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration

-from pyspark import SparkContext
+from pyspark import SparkConf, SparkContext

 from py4j.protocol import Py4JJavaError

@@ -25,12 +25,13 @@ def sentry_init_with_reset(sentry_init):
     from sentry_sdk.integrations import _processed_integrations

     yield lambda: sentry_init(integrations=[SparkIntegration()])
-    _processed_integrations.remove("spark")
+    _processed_integrations.discard("spark")


 @pytest.fixture(scope="function")
 def create_spark_context():
-    yield lambda: SparkContext(appName="Testing123")
+    conf = SparkConf().set("spark.driver.bindAddress", "127.0.0.1")
+    yield lambda: SparkContext(conf=conf, appName="Testing123")
     SparkContext._active_spark_context.stop()

Comments (0)