# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

trigger:
  branches:
    include:
      - '*'  # must quote since "*" is a YAML reserved character; we want a string

pool:
  vmImage: 'ubuntu-18.04'

parameters:
  - name: job1Modules
    type: object
    default:
      - 'hudi-common'
      - 'hudi-flink-datasource/hudi-flink'
      - 'hudi-flink-datasource/hudi-flink1.13.x'
      - 'hudi-flink-datasource/hudi-flink1.14.x'
  - name: job2Modules
    type: object
    default:
      - 'hudi-client/hudi-spark-client'
  - name: job3Modules
    type: object
    default:
      - 'hudi-cli'
      - 'hudi-client/hudi-client-common'
      - 'hudi-client/hudi-flink-client'
      - 'hudi-client/hudi-java-client'
      - 'hudi-sync/hudi-adb-sync'
      - 'hudi-sync/hudi-datahub-sync'
      - 'hudi-sync/hudi-hive-sync'
      - 'hudi-sync/hudi-sync-common'
      - 'hudi-utilities'
  - name: job4Modules
    type: object
    default:
      - '!hudi-cli'
      - '!hudi-client'
      - '!hudi-client/hudi-client-common'
      - '!hudi-client/hudi-flink-client'
      - '!hudi-client/hudi-java-client'
      - '!hudi-client/hudi-spark-client'
      - '!hudi-common'
      - '!hudi-examples'
      - '!hudi-examples/hudi-examples-common'
      - '!hudi-examples/hudi-examples-flink'
      - '!hudi-examples/hudi-examples-java'
      - '!hudi-examples/hudi-examples-spark'
      - '!hudi-flink-datasource'
      - '!hudi-flink-datasource/hudi-flink'
      - '!hudi-flink-datasource/hudi-flink1.13.x'
      - '!hudi-flink-datasource/hudi-flink1.14.x'
      - '!hudi-sync'
      - '!hudi-sync/hudi-adb-sync'
      - '!hudi-sync/hudi-datahub-sync'
      - '!hudi-sync/hudi-hive-sync'
      - '!hudi-sync/hudi-sync-common'
      - '!hudi-utilities'

variables:
  BUILD_PROFILES: '-Dscala-2.11 -Dspark2 -Dflink1.14'
  PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true'
  MVN_OPTS_INSTALL: '-DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS)'
  MVN_OPTS_TEST: '-fae $(BUILD_PROFILES) $(PLUGIN_OPTS)'
  SPARK_VERSION: '2.4.4'
  HADOOP_VERSION: '2.7'
  SPARK_ARCHIVE: spark-$(SPARK_VERSION)-bin-hadoop$(HADOOP_VERSION)
  JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
  JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
  JOB3_MODULES: ${{ join(',',parameters.job3Modules) }}
  JOB4_MODULES: ${{ join(',',parameters.job4Modules) }}

stages:
  - stage: test
    jobs:
      - job: UT_FT_1
        displayName: UT FT common & flink & UT client/spark-client
        timeoutInMinutes: '120'
        steps:
          - task: Maven@3
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@3
            displayName: UT common flink client/spark-client
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB1_MODULES),hudi-client/hudi-spark-client
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: Maven@3
            displayName: FT common flink
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB1_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
      - job: UT_FT_2
        displayName: FT client/spark-client
        timeoutInMinutes: '120'
        steps:
          - task: Maven@3
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@3
            displayName: FT client/spark-client
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
      - job: UT_FT_3
        displayName: UT FT clients & cli & utilities & sync
        timeoutInMinutes: '120'
        steps:
          - task: Maven@3
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@3
            displayName: UT clients & cli & utilities & sync
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: Maven@3
            displayName: FT clients & cli & utilities & sync
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB3_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
      - job: UT_FT_4
        displayName: UT FT other modules
        timeoutInMinutes: '120'
        steps:
          - task: Maven@3
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@3
            displayName: UT other modules
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB4_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: Maven@3
            displayName: FT other modules
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB4_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
      - job: IT
        displayName: IT modules
        timeoutInMinutes: '120'
        steps:
          - task: Maven@3
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL) -Pintegration-tests
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@3
            displayName: UT integ-test
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: AzureCLI@2
            displayName: Prepare for IT
            inputs:
              azureSubscription: apachehudici-service-connection
              scriptType: bash
              scriptLocation: inlineScript
              inlineScript: |
                echo 'Downloading $(SPARK_ARCHIVE)'
                az storage blob download -c ci-caches -n $(SPARK_ARCHIVE).tgz -f $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz --account-name apachehudici
                tar -xvf $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz -C $(Pipeline.Workspace)/
                mkdir /tmp/spark-events/
          - script: |
              export SPARK_HOME=$(Pipeline.Workspace)/$(SPARK_ARCHIVE)
              mvn $(MVN_OPTS_TEST) -Pintegration-tests verify
            displayName: IT