test(concurrency): add tests for write conflicts with different conflict resolution strategies #46205
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| name: Java CI | |
| on: | |
| push: | |
| branches: | |
| - master | |
| - 'release-*' | |
| - branch-0.x | |
| pull_request: | |
| paths-ignore: | |
| - '**.bmp' | |
| - '**.gif' | |
| - '**.jpg' | |
| - '**.jpeg' | |
| - '**.md' | |
| - '**.pdf' | |
| - '**.png' | |
| - '**.svg' | |
| - '**.yaml' | |
| - '.gitignore' | |
| branches: | |
| - master | |
| - 'release-*' | |
| - branch-0.x | |
| concurrency: | |
| group: ${{ github.ref }} | |
| cancel-in-progress: ${{ !contains(github.ref, 'master') && !contains(github.ref, 'branch-0.x') && !contains(github.ref, 'release-') }} | |
| env: | |
| MVN_ARGS: -e -ntp -B -V -Dgpg.skip -Djacoco.skip -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5 | |
| SPARK_COMMON_MODULES: hudi-spark-datasource/hudi-spark,hudi-spark-datasource/hudi-spark-common | |
| JAVA_UT_FILTER1: -Dtest=!TestCOWDataSource,!TestMORDataSource,!TestHoodieFileSystemViews | |
| JAVA_UT_FILTER2: -Dtest=TestCOWDataSource,TestMORDataSource,TestHoodieFileSystemViews | |
| SCALA_TEST_DML_FILTER: -DwildcardSuites=org.apache.spark.sql.hudi.dml | |
| SCALA_TEST_OTHERS_FILTER: -DwildcardSuites=org.apache.hudi,org.apache.spark.hudi,org.apache.spark.sql.avro,org.apache.spark.sql.execution,org.apache.spark.sql.hudi.analysis,org.apache.spark.sql.hudi.command,org.apache.spark.sql.hudi.common,org.apache.spark.sql.hudi.ddl,org.apache.spark.sql.hudi.procedure,org.apache.spark.sql.hudi.feature | |
| FLINK_IT_FILTER1: -Dit.test=ITTestHoodieDataSource | |
| FLINK_IT_FILTER2: -Dit.test=!ITTestHoodieDataSource | |
| jobs: | |
| validate-source: | |
| runs-on: ubuntu-latest | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| cache: maven | |
| - name: Check Binary Files | |
| run: ./scripts/release/validate_source_binary_files.sh | |
| - name: Check Copyright | |
| run: | | |
| ./scripts/release/create_source_directory.sh hudi-tmp-repo | |
| cd hudi-tmp-repo | |
| ./scripts/release/validate_source_copyright.sh | |
| - name: RAT check | |
| run: ./scripts/release/validate_source_rat.sh | |
| test-spark-java-tests-part1: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Java UT 1 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| if: ${{ !endsWith(env.SPARK_PROFILE, '3.5') || !endsWith(env.SCALA_PROFILE, '2.12') }} # skip test Spark 3.5 and Scala 2.12 as it's covered by Azure CI | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java-tests-part2: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl hudi-examples/hudi-examples-spark $MVN_ARGS | |
| - name: Java UT 2 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTA - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java-tests-part3: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Java FTB - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-b -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTC - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-c -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-scala-dml-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-scala-other-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-hudi-hadoop-mr-and-hudi-java-client: | |
| runs-on: ubuntu-latest | |
| timeout-minutes: 40 | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| flinkProfile: "flink1.20" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Generate Maven Wrapper | |
| run: | |
| mvn -N io.takari:maven:wrapper | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| run: | |
| ./mvnw clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -DskipTests=true -Phudi-platform-service $MVN_ARGS -am -pl hudi-hadoop-mr,hudi-client/hudi-java-client | |
| - name: UT - hudi-hadoop-mr and hudi-client/hudi-java-client | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| run: | |
| ./mvnw test -Punit-tests -fae -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -pl hudi-hadoop-mr,hudi-client/hudi-java-client $MVN_ARGS | |
| test-spark-java17-java-tests-part1: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Java UT 1 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java17-java-tests-part2: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS | |
| - name: Java UT 2 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTA - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java17-java-tests-part3: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Java FTB - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTC - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-c -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java17-scala-dml-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java17-scala-other-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java11-17-java-tests-part1: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Java UT 1 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER1 -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java11-17-java-tests-part2: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS | |
| - name: Java UT 2 - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests $JAVA_UT_FILTER2 -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTA - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java11-17-java-tests-part3: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Java FTB - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-b -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Java FTC - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests-c -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java11-17-scala-dml-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_DML_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-spark-java11-17-scala-other-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Scala UT - Common & Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| - name: Scala FT - Spark | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests $SCALA_TEST_OTHERS_FILTER -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS | |
| test-flink-1: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - flinkProfile: "flink1.17" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.12.3' | |
| - flinkProfile: "flink1.18" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.13.1' | |
| - flinkProfile: "flink1.19" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.13.1' | |
| - flinkProfile: "flink1.20" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.13.1' | |
| - flinkProfile: "flink2.0" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.14.4' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: 'scala-2.12' | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -DskipTests=true $MVN_ARGS | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: 'scala-2.12' | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | |
| mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -pl hudi-examples/hudi-examples-flink $MVN_ARGS | |
| - name: Integration Test 1 | |
| env: | |
| SCALA_PROFILE: 'scala-2.12' | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| if: ${{ endsWith(env.FLINK_PROFILE, '1.20') }} | |
| run: | | |
| mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -DskipTests=true $MVN_ARGS | |
| mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" $FLINK_IT_FILTER1 -pl hudi-flink-datasource/hudi-flink $MVN_ARGS | |
| test-flink-2: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - flinkProfile: "flink1.20" | |
| flinkAvroVersion: "1.11.4" | |
| flinkParquetVersion: '1.13.1' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: 'scala-2.12' | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | |
| mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -DskipTests=true $MVN_ARGS | |
| - name: Integration Test 2 | |
| env: | |
| SCALA_PROFILE: 'scala-2.12' | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| if: ${{ endsWith(env.FLINK_PROFILE, '1.20') }} | |
| run: | | |
| mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -DskipTests=true $MVN_ARGS | |
| mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" $FLINK_IT_FILTER2 -pl hudi-flink-datasource/hudi-flink $MVN_ARGS | |
| docker-java17-test: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: 'scala-2.13' | |
| flinkProfile: 'flink1.20' | |
| sparkProfile: 'spark3.5' | |
| sparkRuntime: 'spark3.5.0' | |
| - scalaProfile: 'scala-2.12' | |
| flinkProfile: 'flink1.20' | |
| sparkProfile: 'spark3.5' | |
| sparkRuntime: 'spark3.5.0' | |
| - scalaProfile: 'scala-2.12' | |
| flinkProfile: 'flink1.20' | |
| sparkProfile: 'spark3.4' | |
| sparkRuntime: 'spark3.4.0' | |
| - scalaProfile: 'scala-2.13' | |
| flinkProfile: 'flink1.20' | |
| sparkProfile: 'spark4.0' | |
| sparkRuntime: 'spark4.0.0' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: UT/FT - Docker Test - OpenJDK 17 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| if: ${{ env.SPARK_PROFILE >= 'spark3.4' }} # Only Spark 3.4 and above are supported (note: lexicographic string comparison) | |
| run: | | |
| # NOTE(review): HUDI_VERSION is set but neither exported nor passed to the script below (other jobs pass it to ci_run.sh) — confirm run_docker_java17.sh resolves the version itself, or export/pass it | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/run_docker_java17.sh | |
| validate-bundles: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: 'scala-2.13' | |
| flinkProfile: 'flink1.20' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.13.1' | |
| sparkProfile: 'spark3.5' | |
| sparkRuntime: 'spark3.5.1' | |
| - scalaProfile: 'scala-2.13' | |
| flinkProfile: 'flink1.19' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.13.1' | |
| sparkProfile: 'spark3.5' | |
| sparkRuntime: 'spark3.5.1' | |
| - scalaProfile: 'scala-2.12' | |
| flinkProfile: 'flink1.18' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.13.1' | |
| sparkProfile: 'spark3.4' | |
| sparkRuntime: 'spark3.4.3' | |
| - scalaProfile: 'scala-2.12' | |
| flinkProfile: 'flink1.17' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.12.3' | |
| sparkProfile: 'spark3.3' | |
| sparkRuntime: 'spark3.3.4' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | | |
| if [ "$SCALA_PROFILE" == "scala-2.13" ]; then | |
| mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl packaging/hudi-hadoop-mr-bundle,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle,packaging/hudi-cli-bundle -am | |
| else | |
| mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS | |
| # TODO remove the sudo below. It's a needed workaround as detailed in HUDI-5708. | |
| sudo chown -R "$USER:$(id -g -n)" hudi-platform-service/hudi-metaserver/target/generated-sources | |
| mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl packaging/hudi-flink-bundle -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" | |
| fi | |
| - name: IT - Bundle Validation - OpenJDK 8 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java8 $HUDI_VERSION openjdk8 | |
| - name: IT - Bundle Validation - OpenJDK 11 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11 | |
| - name: IT - Bundle Validation - OpenJDK 17 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 | |
| validate-bundle-spark4: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: 'scala-2.13' | |
| flinkProfile: 'flink1.20' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.13.1' | |
| sparkProfile: 'spark4.0' | |
| sparkRuntime: 'spark4.0.0' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | | |
| mvn clean package -T 2 -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true -Dmaven.javadoc.skip=true $MVN_ARGS -pl packaging/hudi-hadoop-mr-bundle,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle,packaging/hudi-cli-bundle -am | |
| - name: IT - Spark4 Bundle Validation - OpenJDK 17 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 | |
| # Flink 2.0 only supports Java 11 and above | |
| validate-bundles-java11: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: 'scala-2.12' | |
| flinkProfile: 'flink2.0' | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.14.4' | |
| sparkProfile: 'spark3.5' | |
| sparkRuntime: 'spark3.5.1' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 11 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '11' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | | |
| mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -DskipTests=true $MVN_ARGS -pl packaging/hudi-flink-bundle -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" | |
| - name: IT - Bundle Validation - OpenJDK 11 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11 | |
| - name: IT - Bundle Validation - OpenJDK 17 | |
| env: | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| run: | | |
| HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 | |
| # validate-bundles-java11: | |
| # runs-on: ubuntu-latest | |
| # strategy: | |
| # matrix: | |
| # include: | |
| # - scalaProfile: 'scala-2.13' | |
| # flinkProfile: 'flink1.20' | |
| # sparkProfile: 'spark3.5' | |
| # sparkRuntime: 'spark3.5.0' | |
| # - scalaProfile: 'scala-2.12' | |
| # flinkProfile: 'flink1.20' | |
| # sparkProfile: 'spark3.5' | |
| # sparkRuntime: 'spark3.5.0' | |
| # steps: | |
| # - uses: actions/checkout@v3 | |
| # - name: Set up JDK 11 | |
| # uses: actions/setup-java@v3 | |
| # with: | |
| # java-version: '11' | |
| # distribution: 'temurin' | |
| # architecture: x64 | |
| # - name: Build Project | |
| # env: | |
| # FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| # SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| # SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| # run: | | |
| # if [ "$SCALA_PROFILE" == "scala-2.13" ]; then | |
| # mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true -pl packaging/hudi-hadoop-mr-bundle,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle -am | |
| # else | |
| # mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true | |
| # # TODO remove the sudo below. It's a needed workaround as detailed in HUDI-5708. | |
| # sudo chown -R "$USER:$(id -g -n)" hudi-platform-service/hudi-metaserver/target/generated-sources | |
| # mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true -pl packaging/hudi-flink-bundle -am -Davro.version=1.10.0 | |
| # fi | |
| # - name: IT - Bundle Validation - OpenJDK 11 | |
| # env: | |
| # FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| # SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| # SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| # SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| # run: | | |
| # HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| # ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11 | |
| # - name: IT - Bundle Validation - OpenJDK 17 | |
| # env: | |
| # FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| # SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| # SPARK_RUNTIME: ${{ matrix.sparkRuntime }} | |
| # SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| # run: | | |
| # HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) | |
| # ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17 | |
| integration-tests: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - sparkProfile: 'spark3.5' | |
| flinkProfile: 'flink1.20' | |
| sparkArchive: 'spark-3.5.3/spark-3.5.3-bin-hadoop3.tgz' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 8 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '8' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Check disk space | |
| run: df -h | |
| - name: Build Project | |
| env: | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| SCALA_PROFILE: '-Dscala-2.12 -Dscala.binary.version=2.12' | |
| run: | |
| mvn clean install -T 2 $SCALA_PROFILE -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -Pintegration-tests -DskipTests=true $MVN_ARGS | |
| - name: 'UT integ-test' | |
| env: | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SCALA_PROFILE: '-Dscala-2.12 -Dscala.binary.version=2.12' | |
| run: | |
| mvn test $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test $MVN_ARGS | |
| - name: 'Free space' | |
| run: | | |
| sudo rm -rf /usr/share/dotnet | |
| sudo rm -rf /usr/local/lib/android | |
| sudo rm -rf /opt/ghc | |
| sudo rm -rf /usr/local/share/boost | |
| docker system prune --all --force --volumes | |
| - name: 'IT' | |
| env: | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_ARCHIVE: ${{ matrix.sparkArchive }} | |
| SCALA_PROFILE: '-Dscala-2.12 -Dscala.binary.version=2.12' | |
| run: | | |
| echo "Downloading $SPARK_ARCHIVE" | |
| curl https://archive.apache.org/dist/spark/$SPARK_ARCHIVE --create-dirs -o $GITHUB_WORKSPACE/$SPARK_ARCHIVE | |
| tar -xvf $GITHUB_WORKSPACE/$SPARK_ARCHIVE -C $GITHUB_WORKSPACE/ | |
| mkdir /tmp/spark-events/ | |
| SPARK_ARCHIVE_BASENAME=$(basename $SPARK_ARCHIVE) | |
| export SPARK_HOME=$GITHUB_WORKSPACE/${SPARK_ARCHIVE_BASENAME%.*} | |
| rm -f $GITHUB_WORKSPACE/$SPARK_ARCHIVE | |
| mvn verify $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -pl !hudi-flink-datasource/hudi-flink $MVN_ARGS | |
| build-spark-java17: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.3" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.3.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.4" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.4.x" | |
| - scalaProfile: "scala-2.12" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark3.5" | |
| sparkModules: "hudi-spark-datasource/hudi-spark3.5.x" | |
| - scalaProfile: "scala-2.13" | |
| sparkProfile: "spark4.0" | |
| sparkModules: "hudi-spark-datasource/hudi-spark4.0.x" | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| SPARK_MODULES: ${{ matrix.sparkModules }} | |
| run: | |
| mvn clean install -T 2 -Djava17 -Djava.version=17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES" | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| SPARK_PROFILE: ${{ matrix.sparkProfile }} | |
| run: | |
| mvn test -Punit-tests -Djava17 -Djava.version=17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl hudi-examples/hudi-examples-spark $MVN_ARGS | |
| build-flink-java17: | |
| runs-on: ubuntu-latest | |
| strategy: | |
| matrix: | |
| include: | |
| - scalaProfile: "scala-2.12" | |
| flinkProfile: "flink1.20" | |
| flinkAvroVersion: '1.11.4' | |
| flinkParquetVersion: '1.13.1' | |
| steps: | |
| - uses: actions/checkout@v3 | |
| - name: Set up JDK 17 | |
| uses: actions/setup-java@v3 | |
| with: | |
| java-version: '17' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build Project | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| FLINK_AVRO_VERSION: ${{ matrix.flinkAvroVersion }} | |
| FLINK_PARQUET_VERSION: ${{ matrix.flinkParquetVersion }} | |
| run: | |
| mvn clean install -T 2 -Djava17 -Djava.version=17 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink -am -Davro.version="$FLINK_AVRO_VERSION" -Dparquet.version="$FLINK_PARQUET_VERSION" -DskipTests=true $MVN_ARGS | |
| - name: Quickstart Test | |
| env: | |
| SCALA_PROFILE: ${{ matrix.scalaProfile }} | |
| FLINK_PROFILE: ${{ matrix.flinkProfile }} | |
| run: | |
| mvn test -Punit-tests -Djava17 -Djava.version=17 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink $MVN_ARGS | |
| test-hudi-trino-plugin: | |
| runs-on: ubuntu-latest | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v3 | |
| - name: Set up JDK 23 | |
| uses: actions/setup-java@v3 | |
| with: | |
| # Note: We are not caching here again, as we want to use the .m2 repository populated by | |
| # the previous step | |
| java-version: '23' | |
| distribution: 'temurin' | |
| architecture: x64 | |
| - name: Build hudi-trino-plugin with JDK 23 | |
| working-directory: ./hudi-trino-plugin | |
| run: | |
| mvn clean install -DskipTests | |
| - name: Test hudi-trino-plugin with JDK 23 | |
| working-directory: ./hudi-trino-plugin | |
| run: | |
| mvn test |