# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE:
# This config file defines how Azure CI runs tests with the Spark 3.2 and Flink 1.18 profiles.
# PRs need to keep this file in sync with master's version to trigger the CI runs.
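#
# As a hedged, unofficial sketch (not something the pipeline executes): each job below runs
# "clean install" once and then a unit-test or functional-test profile against a subset of
# modules, which roughly corresponds to running locally with the same profile flags, e.g.:
#
#   mvn clean install -Phudi-platform-service -DskipTests -Dscala-2.12 -Dspark3.2 -Dflink1.18
#   mvn test -Punit-tests -Dscala-2.12 -Dspark3.2 -Dflink1.18 -pl hudi-common
#
# The exact module lists and extra plugin flags come from the parameters and variables
# sections below.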
trigger:
  branches:
    include:
      - '*'  # must quote since "*" is a YAML reserved character; we want a string

pool:
  vmImage: 'ubuntu-22.04'

parameters:
  - name: job1Modules
    type: object
    default:
      - 'hudi-common'
      - 'hudi-flink-datasource'
      - 'hudi-flink-datasource/hudi-flink'
      - 'hudi-flink-datasource/hudi-flink1.14.x'
      - 'hudi-flink-datasource/hudi-flink1.15.x'
      - 'hudi-flink-datasource/hudi-flink1.16.x'
      - 'hudi-flink-datasource/hudi-flink1.17.x'
      - 'hudi-flink-datasource/hudi-flink1.18.x'
  - name: job2Modules
    type: object
    default:
      - 'hudi-client/hudi-spark-client'
      - 'hudi-spark-datasource/hudi-spark'
  - name: job3UTModules
    type: object
    default:
      - 'hudi-spark-datasource'
      - 'hudi-spark-datasource/hudi-spark'
      - 'hudi-spark-datasource/hudi-spark3.2.x'
      - 'hudi-spark-datasource/hudi-spark3.2plus-common'
      - 'hudi-spark-datasource/hudi-spark3-common'
      - 'hudi-spark-datasource/hudi-spark-common'
  - name: job4UTModules
    type: object
    default:
      - '!hudi-hadoop-mr'
      - '!hudi-client/hudi-java-client'
      - '!hudi-client/hudi-spark-client'
      - '!hudi-common'
      - '!hudi-examples'
      - '!hudi-examples/hudi-examples-common'
      - '!hudi-examples/hudi-examples-flink'
      - '!hudi-examples/hudi-examples-java'
      - '!hudi-examples/hudi-examples-spark'
      - '!hudi-flink-datasource'
      - '!hudi-flink-datasource/hudi-flink'
      - '!hudi-flink-datasource/hudi-flink1.14.x'
      - '!hudi-flink-datasource/hudi-flink1.15.x'
      - '!hudi-flink-datasource/hudi-flink1.16.x'
      - '!hudi-flink-datasource/hudi-flink1.17.x'
      - '!hudi-flink-datasource/hudi-flink1.18.x'
      - '!hudi-spark-datasource'
      - '!hudi-spark-datasource/hudi-spark'
      - '!hudi-spark-datasource/hudi-spark3.2.x'
      - '!hudi-spark-datasource/hudi-spark3.2plus-common'
      - '!hudi-spark-datasource/hudi-spark3-common'
      - '!hudi-spark-datasource/hudi-spark-common'
  - name: job4FTModules
    type: object
    default:
      - '!hudi-client/hudi-spark-client'
      - '!hudi-common'
      - '!hudi-examples'
      - '!hudi-examples/hudi-examples-common'
      - '!hudi-examples/hudi-examples-flink'
      - '!hudi-examples/hudi-examples-java'
      - '!hudi-examples/hudi-examples-spark'
      - '!hudi-flink-datasource'
      - '!hudi-flink-datasource/hudi-flink'
      - '!hudi-flink-datasource/hudi-flink1.14.x'
      - '!hudi-flink-datasource/hudi-flink1.15.x'
      - '!hudi-flink-datasource/hudi-flink1.16.x'
      - '!hudi-flink-datasource/hudi-flink1.17.x'
      - '!hudi-flink-datasource/hudi-flink1.18.x'
      - '!hudi-spark-datasource/hudi-spark'
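
# Note on the job4 module lists above: Maven's "-pl" option treats a leading '!' as an
# exclusion, so those parameters select "every module except the listed ones". A hedged
# illustration (not a command the pipeline runs):
#
#   mvn test -Punit-tests -pl '!hudi-common,!hudi-examples'
#
# builds the whole reactor minus hudi-common and hudi-examples, which is how the UT_FT_4
# job covers the modules the other jobs skip.
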
variables:
  BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.18'
  PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp -B -V -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn'
  MVN_OPTS_INSTALL: '-Phudi-platform-service -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS) -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5'
  MVN_OPTS_TEST: '-fae -Pwarn-log $(BUILD_PROFILES) $(PLUGIN_OPTS)'
  JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
  JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
  JOB3_MODULES: ${{ join(',',parameters.job3UTModules) }}
  JOB4_UT_MODULES: ${{ join(',',parameters.job4UTModules) }}
  JOB4_FT_MODULES: ${{ join(',',parameters.job4FTModules) }}
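
# For reference, a sketch of how these variables expand (derived by substituting the
# values defined above; the running pipeline is the source of truth):
#
#   JOB2_MODULES  -> hudi-client/hudi-spark-client,hudi-spark-datasource/hudi-spark
#   MVN_OPTS_TEST -> -fae -Pwarn-log -Dscala-2.12 -Dspark3.2 -Dflink1.18 <PLUGIN_OPTS>
#
# so a step option like "$(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)" runs
# the functional-tests profile against exactly those two modules.
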
stages:
  - stage: test
    jobs:
      - job: UT_FT_1
        displayName: UT FT common & flink & UT client/spark-client
        timeoutInMinutes: '150'
        steps:
          - task: Maven@4
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@4
            displayName: UT common flink client/spark-client
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB1_MODULES),hudi-client/hudi-spark-client
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: Maven@4
            displayName: FT common flink
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB1_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - script: |
              # pull the quoted attribute values out of the surefire <testcase> entries and
              # sort numerically in descending order, so the slowest 100 tests are listed first
              grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
            displayName: Top 100 long-running testcases
      - job: UT_FT_2
        displayName: FT client/spark-client & hudi-spark-datasource/hudi-spark
        timeoutInMinutes: '150'
        steps:
          - task: Maven@4
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@4
            displayName: FT client/spark-client & hudi-spark-datasource/hudi-spark
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - script: |
              grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
            displayName: Top 100 long-running testcases
      - job: UT_FT_3
        displayName: UT spark-datasource
        timeoutInMinutes: '240'
        steps:
          - task: Maven@4
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@4
            displayName: UT spark-datasource
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - script: |
              grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
            displayName: Top 100 long-running testcases
      - job: UT_FT_4
        displayName: UT FT other modules
        timeoutInMinutes: '240'
        steps:
          - task: Maven@4
            displayName: maven install
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'clean install'
              options: $(MVN_OPTS_INSTALL)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
          - task: Maven@4
            displayName: UT other modules
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB4_UT_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - task: Maven@4
            displayName: FT other modules
            inputs:
              mavenPomFile: 'pom.xml'
              goals: 'test'
              options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB4_FT_MODULES)
              publishJUnitResults: false
              jdkVersionOption: '1.8'
              mavenOptions: '-Xmx4g'
          - script: |
              grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
            displayName: Top 100 long-running testcases