Skip to content

[CELEBORN-1413][FOLLOWUP] Bump spark 4.0 version to 4.0.0 #3282

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 11 commits into from
Closed
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions .github/workflows/maven.yml
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,43 @@ jobs:
**/target/test-reports/**
**/target/unit-tests.log

# CI job: build the Spark 4.x client modules with Maven on JDK 17.
# NOTE(review): this job is install-only for now — the `test` invocation is
# commented out inside the run script below until Spark 4.0 tests are enabled.
spark4:
  runs-on: ubuntu-22.04
  strategy:
    # Let the remaining matrix combinations finish even if one fails.
    fail-fast: false
    matrix:
      java:
        - 17
      spark:
        - '4.0'
      # Only consumed by the (currently disabled) test command in the run
      # script below.
      shuffle-plugin-class:
        - 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
  steps:
    - uses: actions/checkout@v4
    - name: Setup JDK ${{ matrix.java }}
      uses: actions/setup-java@v4
      with:
        distribution: zulu
        java-version: ${{ matrix.java }}
        # Cache the local Maven repository between runs.
        cache: maven
        check-latest: false
    # Derives the major version ('4') from the binary version ('4.0') via
    # ${VAR%%.*}, then builds the listed modules and their dependencies
    # (-am) with tests skipped.
    - name: Test with Maven
      run: |
        SPARK_BINARY_VERSION=${{ matrix.spark }}
        SPARK_MAJOR_VERSION=${SPARK_BINARY_VERSION%%.*}
        PROFILES="-Pgoogle-mirror,spark-${{ matrix.spark }}"
        TEST_MODULES="client-spark/common,client-spark/spark-3,client-spark/spark-3-columnar-common,client-spark/spark-${SPARK_MAJOR_VERSION}-shaded,tests/spark-it"
        build/mvn $PROFILES -pl $TEST_MODULES -am clean install -DskipTests
        # build/mvn $PROFILES -pl $TEST_MODULES -Dspark.shuffle.sort.io.plugin.class=${{ matrix.shuffle-plugin-class }} test
    # Collect build/test logs as an artifact only when the job fails.
    - name: Upload test log
      if: failure()
      uses: actions/upload-artifact@v4
      with:
        name: spark-${{ matrix.spark }}-java-${{ matrix.java }}-unit-test-log
        path: |
          **/target/test-reports/**
          **/target/unit-tests.log

flink1:
runs-on: ubuntu-22.04
strategy:
Expand Down
37 changes: 37 additions & 0 deletions .github/workflows/sbt.yml
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,43 @@ jobs:
**/target/test-reports/**
**/target/unit-tests.log

# CI job: package the Spark 4.x client modules with SBT on JDK 17.
# The run script only packages (see its in-script TODO) — no test goal is
# invoked yet for Spark 4.0.
spark4:
  runs-on: ubuntu-22.04
  strategy:
    # Let the remaining matrix combinations finish even if one fails.
    fail-fast: false
    matrix:
      spark:
        - '4.0'
      java:
        - 17
      # NOTE(review): `scala-binary` is not referenced anywhere in this job
      # (the steps use `matrix.scala` from the include entry below) —
      # confirm whether this dimension is still needed.
      scala-binary:
        - '2.13'
      # Passed to SBT via -Dspark.shuffle.plugin.class in the run script.
      shuffle-plugin-class:
        - 'org.apache.spark.shuffle.celeborn.CelebornShuffleDataIO'
      include:
        # Maps the Spark binary version to the exact Scala patch release
        # handed to SBT's `++` cross-version switch.
        - spark: '4.0'
          scala: '2.13.16'
  steps:
    - uses: actions/checkout@v4
    - name: Setup JDK ${{ matrix.java }}
      uses: actions/setup-java@v4
      with:
        distribution: zulu
        java-version: ${{ matrix.java }}
        check-latest: false
    - name: Test with SBT
      run: |
        # TODO: enable spark4 tests
        build/sbt -Dspark.shuffle.plugin.class=${{ matrix.shuffle-plugin-class }} -Pspark-${{ matrix.spark }} ++${{ matrix.scala }} "clean; celeborn-spark-group/package"
    # Collect build/test logs as an artifact only when the job fails.
    - name: Upload test log
      if: failure()
      uses: actions/upload-artifact@v4
      with:
        name: spark-${{ matrix.spark }}-java-${{ matrix.java }}-scala-${{ matrix.scala }}-unit-test-log
        path: |
          **/target/test-reports/**
          **/target/unit-tests.log

flink1:
runs-on: ubuntu-22.04
strategy:
Expand Down
6 changes: 6 additions & 0 deletions build/make-distribution.sh
Original file line number Diff line number Diff line change
Expand Up @@ -346,6 +346,9 @@ if [ "$SBT_ENABLED" == "true" ]; then
sbt_build_client -Pspark-2.4
sbt_build_client -Pspark-3.4
sbt_build_client -Pspark-3.5
export JAVA_HOME=$JAVA17_HOME
sbt_build_client -Pspark-4.0
export JAVA_HOME=$JAVA8_HOME
sbt_build_client -Pflink-1.16
sbt_build_client -Pflink-1.17
sbt_build_client -Pflink-1.18
Expand Down Expand Up @@ -384,6 +387,9 @@ else
build_spark_client -Pspark-2.4
build_spark_client -Pspark-3.4
build_spark_client -Pspark-3.5
export JAVA_HOME=$JAVA17_HOME
build_spark_client -Pspark-4.0
export JAVA_HOME=$JAVA8_HOME
build_flink_client -Pflink-1.16
build_flink_client -Pflink-1.17
build_flink_client -Pflink-1.18
Expand Down
6 changes: 6 additions & 0 deletions build/release/release.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ ASF_PASSWORD=${ASF_PASSWORD:?"ASF_PASSWORD is required"}
RELEASE_RC_NO=${RELEASE_RC_NO:?"RELEASE_RC_NO is required, e.g. 0"}
JAVA8_HOME=${JAVA8_HOME:?"JAVA8_HOME is required"}
JAVA11_HOME=${JAVA11_HOME:?"JAVA11_HOME is required"}
JAVA17_HOME=${JAVA17_HOME:?"JAVA17_HOME is required"}

RELEASE_VERSION=$(awk -F'"' '/ThisBuild \/ version/ {print $2}' version.sbt)

Expand Down Expand Up @@ -110,6 +111,11 @@ upload_nexus_staging() {
echo "Deploying celeborn-client-spark-3-shaded_2.13"
${PROJECT_DIR}/build/sbt -Pspark-3.4 ++2.13.8 "clean;celeborn-client-spark-3-shaded/publishSigned"

export JAVA_HOME=$JAVA17_HOME
echo "Deploying celeborn-client-spark-4-shaded_2.13"
${PROJECT_DIR}/build/sbt -Pspark-4.0 "clean;celeborn-client-spark-4-shaded/publishSigned"
export JAVA_HOME=$JAVA8_HOME

echo "Deploying celeborn-client-flink-1.16-shaded_2.12"
${PROJECT_DIR}/build/sbt -Pflink-1.16 "clean;celeborn-client-flink-1_16-shaded/publishSigned"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,9 @@ class ApplicationHeartbeater(
}
}

private val lock = new AnyRef
def stop(): Unit = {
stopped.synchronized {
lock.synchronized {
if (!stopped) {
// Stop appHeartbeat first
logInfo(s"Stop Application heartbeat $appId")
Expand Down
8 changes: 4 additions & 4 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@
<maven.plugin.shade.version>3.5.2</maven.plugin.shade.version>
<maven.plugin.spotless.version>2.24.1</maven.plugin.spotless.version>
<maven.plugin.surefire.version>3.0.0-M7</maven.plugin.surefire.version>
<maven.plugin.silencer.version>1.7.13</maven.plugin.silencer.version>
<maven.plugin.silencer.version>1.7.19</maven.plugin.silencer.version>
<maven.plugin.resources.version>3.3.1</maven.plugin.resources.version>
<openapi.generator.version>7.8.0</openapi.generator.version>

Expand Down Expand Up @@ -1527,10 +1527,10 @@
</modules>
<properties>
<lz4-java.version>1.8.0</lz4-java.version>
<scala.version>2.13.11</scala.version>
<scala.version>2.13.16</scala.version>
<scala.binary.version>2.13</scala.binary.version>
<spark.version>4.0.0-preview2</spark.version>
<zstd-jni.version>1.5.6-5</zstd-jni.version>
<spark.version>4.0.0</spark.version>
<zstd-jni.version>1.5.6-9</zstd-jni.version>
</properties>
</profile>

Expand Down
15 changes: 9 additions & 6 deletions project/CelebornBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -280,10 +280,10 @@ object CelebornCommonSettings {
val SCALA_2_12_15 = "2.12.15"
val SCALA_2_12_17 = "2.12.17"
val SCALA_2_12_18 = "2.12.18"
val scala213 = "2.13.5"
val SCALA_2_13_5 = "2.13.5"
val SCALA_2_13_8 = "2.13.8"
val scala213_11 = "2.13.11"
val ALL_SCALA_VERSIONS = Seq(SCALA_2_11_12, SCALA_2_12_10, SCALA_2_12_15, SCALA_2_12_17, SCALA_2_12_18, scala213, SCALA_2_13_8, scala213_11)
val SCALA_2_13_16 = "2.13.16"
val ALL_SCALA_VERSIONS = Seq(SCALA_2_11_12, SCALA_2_12_10, SCALA_2_12_15, SCALA_2_12_17, SCALA_2_12_18, SCALA_2_13_5, SCALA_2_13_8, SCALA_2_13_16)

val DEFAULT_SCALA_VERSION = SCALA_2_12_18

Expand Down Expand Up @@ -919,13 +919,16 @@ object Spark40 extends SparkClientProjects {
val sparkClientShadedProjectName = "celeborn-client-spark-4-shaded"

val lz4JavaVersion = "1.8.0"
val sparkProjectScalaVersion = "2.13.11"
val sparkProjectScalaVersion = "2.13.16"

val sparkVersion = "4.0.0-preview2"
val zstdJniVersion = "1.5.6-5"
val sparkVersion = "4.0.0"
val zstdJniVersion = "1.5.6-9"
val scalaBinaryVersion = "2.13"

override val sparkColumnarShuffleVersion: String = "4"

// TODO: Fix columnar shuffle build issue for Spark 4.0
override val includeColumnarShuffle: Boolean = false
}

trait SparkClientProjects {
Expand Down
Loading