diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile
index 3e13cafa4658..74f8d6a992ea 100644
--- a/docker/test/integration/runner/Dockerfile
+++ b/docker/test/integration/runner/Dockerfile
@@ -70,7 +70,7 @@ RUN curl -fsSL -O https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.3.2-
 # if you change packages, don't forget to update them in tests/integration/helpers/cluster.py
 RUN packages="org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,\
 io.delta:delta-core_2.12:2.3.0,\
-org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" \
+org.apache.iceberg:iceberg-spark-runtime-3.5_2.13:1.8.1" \
     && /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "$packages" > /dev/null \
     && find /root/.ivy2/ -name '*.jar' -exec ln -sf {} /spark-3.3.2-bin-hadoop3/jars/ \;
diff --git a/utils/data-lakes-importer.py b/utils/data-lakes-importer.py
index 0a00dd2a783f..379e592dafa4 100755
--- a/utils/data-lakes-importer.py
+++ b/utils/data-lakes-importer.py
@@ -15,7 +15,7 @@ def get_spark_for_iceberg(result_path):
         pyspark.sql.SparkSession.builder.appName("spark_test")
         .config(
             "spark.jars.packages",
-            "org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0",
+            "org.apache.iceberg:iceberg-spark-runtime-3.5_2.13:1.8.1",
         )
         .config(
             "spark.sql.catalog.spark_catalog",
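
For reference, a minimal sketch of the Spark session this change configures, wired to the bumped Iceberg runtime. Only the spark.jars.packages coordinate comes from the diff itself; the catalog class, catalog type, and warehouse settings below are assumptions modeled on the standard Iceberg quickstart configuration, since the hunk truncates after "spark.sql.catalog.spark_catalog".

    # Hedged sketch, not the repository's actual helper: a local Spark
    # session that resolves the bumped Iceberg Spark runtime via Ivy.
    from pyspark.sql import SparkSession

    def get_spark_for_iceberg(result_path):
        return (
            SparkSession.builder.appName("spark_test")
            .config(
                "spark.jars.packages",
                # coordinate taken from the diff: Spark 3.5 / Scala 2.13
                # runtime bundle, Iceberg 1.8.1
                "org.apache.iceberg:iceberg-spark-runtime-3.5_2.13:1.8.1",
            )
            .config(
                "spark.sql.catalog.spark_catalog",
                # assumed value; the diff cuts off before it
                "org.apache.iceberg.spark.SparkSessionCatalog",
            )
            # assumed Hadoop-style catalog writing tables under result_path
            .config("spark.sql.catalog.spark_catalog.type", "hadoop")
            .config("spark.sql.catalog.spark_catalog.warehouse", result_path)
            .master("local")
            .getOrCreate()
        )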