Skip to content

Commit 7ff50a2

Browse files
committed
chore: Update Java distribution and enhance Spark configurations
- Changed Java distribution from Oracle to Temurin in build and check workflows for improved compatibility.
- Added the "spark.executor.processTreeMetrics.enabled" configuration to multiple files to enhance Spark metrics handling.
- Ensured consistency across application and test configurations by aligning the settings in Constants.scala, application.conf, and application-integration.conf.
1 parent 8046b48 commit 7ff50a2

6 files changed

Lines changed: 12 additions & 6 deletions

File tree

.github/workflows/build.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ jobs:
3535
java-version: '17'
3636
java-package: jdk
3737
architecture: x64
38-
distribution: oracle
38+
distribution: temurin
3939
- name: Login to DockerHub
4040
uses: docker/login-action@v2
4141
with:

.github/workflows/check.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ jobs:
1515
java-version: '17'
1616
java-package: jdk
1717
architecture: x64
18-
distribution: oracle
18+
distribution: temurin
1919
- name: Gradle build with cache
2020
uses: burrunan/gradle-cache-action@v1
2121
with:

api/src/main/scala/io/github/datacatering/datacaterer/api/model/Constants.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -289,7 +289,8 @@ object Constants {
289289
"spark.hadoop.fs.hdfs.impl" -> "org.apache.hadoop.hdfs.DistributedFileSystem",
290290
"spark.hadoop.fs.file.impl" -> "com.globalmentor.apache.hadoop.fs.BareLocalFileSystem",
291291
"spark.sql.extensions" -> "io.delta.sql.DeltaSparkSessionExtension,org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
292-
"spark.metrics.executorMetricsSource.enabled" -> "false"
292+
"spark.metrics.executorMetricsSource.enabled" -> "false",
293+
"spark.executor.processTreeMetrics.enabled" -> "false"
293294
)
294295

295296
//jdbc defaults

app/src/test/resources/application-integration.conf

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,10 @@ runtime {
110110
"spark.hadoop.fs.s3a.bucket.all.committer.magic.enabled" = "true",
111111
"spark.hadoop.fs.hdfs.impl" = "org.apache.hadoop.hdfs.DistributedFileSystem",
112112
"spark.hadoop.fs.file.impl" = "com.globalmentor.apache.hadoop.fs.BareLocalFileSystem",
113-
"spark.sql.extensions" = "io.delta.sql.DeltaSparkSessionExtension,org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
113+
"spark.sql.extensions" = "io.delta.sql.DeltaSparkSessionExtension,org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
114+
"spark.metrics.executorMetricsSource.enabled" = "false",
115+
"spark.ui.enabled" = "false",
116+
"spark.executor.processTreeMetrics.enabled" = "false"
114117
}
115118
}
116119

app/src/test/resources/application.conf

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,9 @@ runtime{
4141
"spark.hadoop.fs.hdfs.impl": "org.apache.hadoop.hdfs.DistributedFileSystem",
4242
"spark.hadoop.fs.file.impl": "com.globalmentor.apache.hadoop.fs.BareLocalFileSystem",
4343
"spark.sql.extensions": "io.delta.sql.DeltaSparkSessionExtension,org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions",
44-
"spark.metrics.executorMetricsSource.enabled": "false"
44+
"spark.metrics.executorMetricsSource.enabled": "false",
45+
"spark.ui.enabled": "false",
46+
"spark.executor.processTreeMetrics.enabled": "false"
4547
}
4648
}
4749

app/src/test/scala/io/github/datacatering/datacaterer/core/util/SparkSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ trait SparkSuite extends AnyFunSuite with BeforeAndAfterAll with BeforeAndAfterE
1212
.appName("spark tests")
1313
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
1414
.config("spark.sql.shuffle.partitions", "2")
15-
// .config("spark.ui.enabled", "false")
15+
.config("spark.ui.enabled", "false")
1616
.config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension")
1717
.config("spark.metrics.executorMetricsSource.enabled", "false")
1818
.getOrCreate()

0 commit comments

Comments (0)