Skip to content

Commit 8f0e22d

Browse files
authored
fix: Fix hostname resolution for spark tests (feast-dev#5610)
1 parent 02c3006 commit 8f0e22d

4 files changed

Lines changed: 31 additions & 4 deletions

File tree

sdk/python/feast/infra/compute_engines/spark/utils.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,9 @@ def get_or_create_new_spark_session(
2121
conf=SparkConf().setAll([(k, v) for k, v in spark_config.items()])
2222
)
2323

24+
spark_builder = spark_builder.config("spark.driver.host", "127.0.0.1")
25+
spark_builder = spark_builder.config("spark.driver.bindAddress", "127.0.0.1")
26+
2427
spark_session = spark_builder.getOrCreate()
2528
spark_session.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")
2629
return spark_session

sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,8 @@ def __init__(self, project_name: str, *args, **kwargs):
3535
"spark.eventLog.enabled": "false",
3636
"spark.sql.parser.quotedRegexColumnNames": "true",
3737
"spark.sql.session.timeZone": "UTC",
38+
"spark.driver.host": "127.0.0.1",
39+
"spark.driver.bindAddress": "127.0.0.1",
3840
}
3941
if not self.spark_offline_store_config:
4042
self.create_offline_store_config()

sdk/python/tests/unit/infra/compute_engines/spark/test_nodes.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@ def spark_session():
2525
SparkSession.builder.appName("FeastSparkTests")
2626
.master("local[*]")
2727
.config("spark.sql.shuffle.partitions", "1")
28+
.config("spark.driver.host", "127.0.0.1")
29+
.config("spark.driver.bindAddress", "127.0.0.1")
2830
.getOrCreate()
2931
)
3032

sdk/python/tests/unit/transformation/test_spark_transformation.py

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,12 @@ def remove_extra_spaces_sql(df, column_name):
5252

5353
@pytest.fixture
5454
def spark_fixture():
55-
spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
55+
spark = (
56+
SparkSession.builder.appName("Testing PySpark Example")
57+
.config("spark.driver.host", "127.0.0.1")
58+
.config("spark.driver.bindAddress", "127.0.0.1")
59+
.getOrCreate()
60+
)
5661
try:
5762
yield spark
5863
finally:
@@ -61,7 +66,12 @@ def spark_fixture():
6166

6267
@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
6368
def test_spark_transformation(spark_fixture):
64-
spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
69+
spark = (
70+
SparkSession.builder.appName("Testing PySpark Example")
71+
.config("spark.driver.host", "127.0.0.1")
72+
.config("spark.driver.bindAddress", "127.0.0.1")
73+
.getOrCreate()
74+
)
6575
df = get_sample_df(spark)
6676

6777
spark_transformation = Transformation(
@@ -77,7 +87,12 @@ def test_spark_transformation(spark_fixture):
7787

7888
@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
7989
def test_spark_transformation_init_transformation(spark_fixture):
80-
spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
90+
spark = (
91+
SparkSession.builder.appName("Testing PySpark Example")
92+
.config("spark.driver.host", "127.0.0.1")
93+
.config("spark.driver.bindAddress", "127.0.0.1")
94+
.getOrCreate()
95+
)
8196
df = get_sample_df(spark)
8297

8398
spark_transformation = SparkTransformation(
@@ -93,7 +108,12 @@ def test_spark_transformation_init_transformation(spark_fixture):
93108

94109
@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
95110
def test_spark_transformation_sql(spark_fixture):
96-
spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
111+
spark = (
112+
SparkSession.builder.appName("Testing PySpark Example")
113+
.config("spark.driver.host", "127.0.0.1")
114+
.config("spark.driver.bindAddress", "127.0.0.1")
115+
.getOrCreate()
116+
)
97117
df = get_sample_df(spark)
98118

99119
spark_transformation = SparkTransformation(

0 commit comments

Comments (0)