<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi</artifactId>
<packaging>pom</packaging>
<version>1.0.0-SNAPSHOT</version>
<description>Apache Hudi brings stream-style processing to big data</description>
<url>https://github.com/apache/hudi</url>
<name>Hudi</name>
<modules>
<module>hudi-common</module>
<module>hudi-cli</module>
<module>hudi-client</module>
<module>hudi-aws</module>
<module>hudi-gcp</module>
<module>hudi-hadoop-common</module>
<module>hudi-hadoop-mr</module>
<module>hudi-io</module>
<module>hudi-spark-datasource</module>
<module>hudi-timeline-service</module>
<module>hudi-utilities</module>
<module>hudi-sync</module>
<module>packaging/hudi-hadoop-mr-bundle</module>
<module>packaging/hudi-datahub-sync-bundle</module>
<module>packaging/hudi-hive-sync-bundle</module>
<module>packaging/hudi-aws-bundle</module>
<module>packaging/hudi-gcp-bundle</module>
<module>packaging/hudi-spark-bundle</module>
<module>packaging/hudi-presto-bundle</module>
<module>packaging/hudi-utilities-bundle</module>
<module>packaging/hudi-utilities-slim-bundle</module>
<module>packaging/hudi-timeline-server-bundle</module>
<module>packaging/hudi-trino-bundle</module>
<module>hudi-examples</module>
<module>hudi-flink-datasource</module>
<module>hudi-kafka-connect</module>
<module>packaging/hudi-flink-bundle</module>
<module>packaging/hudi-kafka-connect-bundle</module>
<module>packaging/hudi-cli-bundle</module>
<module>hudi-tests-common</module>
</modules>
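<!-- Example (hedged, standard Maven usage): a single module from this reactor and its
     in-repo dependencies can be built on their own, e.g.:
       mvn clean install -pl hudi-common -am
     where -pl selects the module and -am ("also make") builds the modules it depends on. -->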
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<organization>
<name>The Apache Software Foundation</name>
<url>https://www.apache.org</url>
</organization>
<properties>
<maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
<maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
<maven-shade-plugin.version>3.4.0</maven-shade-plugin.version>
<maven-javadoc-plugin.version>3.1.1</maven-javadoc-plugin.version>
<maven-compiler-plugin.version>3.8.0</maven-compiler-plugin.version>
<maven-deploy-plugin.version>2.4</maven-deploy-plugin.version>
<genjavadoc-plugin.version>0.15</genjavadoc-plugin.version>
<build-helper-maven-plugin.version>1.7</build-helper-maven-plugin.version>
<maven-enforcer-plugin.version>3.0.0-M1</maven-enforcer-plugin.version>
<maven-docker-plugin.version>0.42.1</maven-docker-plugin.version>
<java.version>1.8</java.version>
<kryo.shaded.version>4.0.2</kryo.shaded.version>
<fasterxml.spark3.version>2.10.0</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<kafka.version>2.0.0</kafka.version>
<kafka.spark3.version>2.8.0</kafka.spark3.version>
<pulsar.version>3.0.2</pulsar.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<pulsar.spark.scala11.version>2.4.5</pulsar.spark.scala11.version>
<pulsar.spark.scala12.version>3.1.1.4</pulsar.spark.scala12.version>
<confluent.version>5.3.4</confluent.version>
<glassfish.version>2.17</glassfish.version>
<glassfish.el.version>3.0.1-b12</glassfish.el.version>
<parquet.version>1.10.1</parquet.version>
<junit.jupiter.version>5.7.2</junit.jupiter.version>
<junit.vintage.version>5.7.2</junit.vintage.version>
<junit.platform.version>1.7.2</junit.platform.version>
<mockito.jupiter.version>3.3.3</mockito.jupiter.version>
<log4j2.version>2.17.2</log4j2.version>
<slf4j.version>1.7.36</slf4j.version>
<joda.version>2.9.9</joda.version>
<hadoop.version>2.10.2</hadoop.version>
<hive.groupid>org.apache.hive</hive.groupid>
<hive.version>2.3.1</hive.version>
<hive.parquet.version>1.10.1</hive.parquet.version>
<hive.avro.version>1.8.2</hive.avro.version>
<presto.version>0.273</presto.version>
<trino.version>390</trino.version>
<hive.exec.classifier>core</hive.exec.classifier>
<metrics.version>4.1.1</metrics.version>
<orc.spark.version>1.6.0</orc.spark.version>
<orc.flink.version>1.5.6</orc.flink.version>
<roaringbitmap.version>0.9.47</roaringbitmap.version>
<airlift.version>0.25</airlift.version>
<prometheus.version>0.8.0</prometheus.version>
<aws.sdk.httpclient.version>4.5.13</aws.sdk.httpclient.version>
<aws.sdk.httpcore.version>4.4.13</aws.sdk.httpcore.version>
<http.version>4.4.1</http.version>
<spark.version>${spark3.version}</spark.version>
<spark2.version>2.4.4</spark2.version>
<spark3.version>3.4.1</spark3.version>
<sparkbundle.version></sparkbundle.version>
<flink1.18.version>1.18.0</flink1.18.version>
<flink1.17.version>1.17.1</flink1.17.version>
<flink1.16.version>1.16.2</flink1.16.version>
<flink1.15.version>1.15.1</flink1.15.version>
<flink1.14.version>1.14.5</flink1.14.version>
<flink.version>${flink1.18.version}</flink.version>
<hudi.flink.module>hudi-flink1.18.x</hudi.flink.module>
<flink.bundle.version>1.18</flink.bundle.version>
<!-- This version is pinned to match the Avro version used by flink-avro -->
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.13.1</flink.format.parquet.version>
<flink.connector.kafka.version>3.0.0-1.17</flink.connector.kafka.version>
<flink.runtime.artifactId>flink-runtime</flink.runtime.artifactId>
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_2.12</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
<rocksdbjni.version>7.5.3</rocksdbjni.version>
<spark30.version>3.0.2</spark30.version>
<spark31.version>3.1.3</spark31.version>
<spark32.version>3.2.3</spark32.version>
<spark33.version>3.3.1</spark33.version>
<spark34.version>3.4.1</spark34.version>
<spark35.version>3.5.0</spark35.version>
<hudi.spark.module>hudi-spark3.2.x</hudi.spark.module>
<!-- NOTE: Different Spark versions may require a different number of shared
modules to be incorporated, hence the multiple placeholders
(hudi.spark.common.modules.*) -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<avro.version>1.8.2</avro.version>
<caffeine.version>2.9.1</caffeine.version>
<commons.io.version>2.11.0</commons.io.version>
<scala11.version>2.11.12</scala11.version>
<scala12.version>2.12.10</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.collection-compat.version>2.8.1</scala.collection-compat.version>
<scala.binary.version>2.12</scala.binary.version>
<apache-rat-plugin.version>0.13</apache-rat-plugin.version>
<scala-maven-plugin.version>3.3.1</scala-maven-plugin.version>
<scalatest.spark_pre31.version>3.0.1</scalatest.spark_pre31.version>
<scalatest.spark3.version>3.1.0</scalatest.spark3.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<surefire-log4j.file>log4j2-surefire.properties</surefire-log4j.file>
<thrift.version>0.13.0</thrift.version>
<javalin.version>4.6.7</javalin.version>
<jetty.version>9.4.53.v20231009</jetty.version>
<htrace.version>3.1.0-incubating</htrace.version>
<hbase.version>2.4.13</hbase.version>
<h2.version>1.4.199</h2.version>
<awaitility.version>3.1.2</awaitility.version>
<skipTests>false</skipTests>
<skipUTs>${skipTests}</skipUTs>
<skipFTs>${skipTests}</skipFTs>
<skipITs>${skipTests}</skipITs>
<skip.hudi-spark2.unit.tests>${skipTests}</skip.hudi-spark2.unit.tests>
<skip.hudi-spark3.unit.tests>${skipTests}</skip.hudi-spark3.unit.tests>
<skipDocker>${skipTests}</skipDocker>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<main.basedir>${project.basedir}</main.basedir>
<spark.bundle.hive.scope>provided</spark.bundle.hive.scope>
<spark.bundle.hive.shade.prefix/>
<utilities.bundle.hive.scope>provided</utilities.bundle.hive.scope>
<utilities.bundle.hive.shade.prefix/>
<argLine>-Xmx2g -Xms128m</argLine>
<jacoco.version>0.8.8</jacoco.version>
<presto.bundle.bootstrap.scope>compile</presto.bundle.bootstrap.scope>
<presto.bundle.bootstrap.shade.prefix>org.apache.hudi.</presto.bundle.bootstrap.shade.prefix>
<trino.bundle.bootstrap.scope>compile</trino.bundle.bootstrap.scope>
<trino.bundle.bootstrap.shade.prefix>org.apache.hudi.</trino.bundle.bootstrap.shade.prefix>
<shadeSources>true</shadeSources>
<zk-curator.version>2.7.1</zk-curator.version>
<disruptor.version>3.4.2</disruptor.version>
<antlr.version>4.8</antlr.version>
<aws.sdk.version>2.18.40</aws.sdk.version>
<proto.version>3.21.7</proto.version>
<protoc.version>3.21.5</protoc.version>
<dynamodb.lockclient.version>1.2.0</dynamodb.lockclient.version>
<zookeeper.version>3.5.7</zookeeper.version>
<openjdk.jol.version>0.16</openjdk.jol.version>
<google.cloud.pubsub.version>1.120.0</google.cloud.pubsub.version>
<gcs.connector.version>hadoop2-2.2.7</gcs.connector.version>
<dynamodb-local.port>8000</dynamodb-local.port>
<dynamodb-local.endpoint>http://localhost:${dynamodb-local.port}</dynamodb-local.endpoint>
<moto.port>5000</moto.port>
<moto.endpoint>http://localhost:${moto.port}</moto.endpoint>
<springboot.version>2.7.3</springboot.version>
<spring.shell.version>2.1.1</spring.shell.version>
<snappy.version>1.1.8.3</snappy.version>
</properties>
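<!-- Example (hedged): these are ordinary Maven properties, so -D user properties on the
     command line take precedence over the defaults above, e.g.:
       mvn clean package -Dspark3.version=3.5.0   (build against another Spark 3 release)
       mvn clean verify -DskipITs=true            (skip integration tests only)
       mvn clean install -DskipTests=true         (skip unit, functional, and integration tests)
     Whether a given version override actually builds depends on the matching profiles and
     modules elsewhere in this POM. -->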
<scm>
<connection>scm:git:git@github.com:apache/hudi.git</connection>
<developerConnection>scm:git:git@github.com:apache/hudi.git</developerConnection>
<url>git@github.com:apache/hudi.git</url>
<tag>HEAD</tag>
</scm>
<issueManagement>
<system>JIRA</system>
<url>https://issues.apache.org/jira/browse/HUDI</url>
</issueManagement>
<mailingLists>
<mailingList>
<name>Dev Mailing List</name>
<post>dev@hudi.apache.org</post>
<subscribe>dev-subscribe@hudi.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@hudi.apache.org</unsubscribe>
</mailingList>
<mailingList>
<name>User Mailing List</name>
<post>users@hudi.apache.org</post>
<subscribe>users-subscribe@hudi.apache.org</subscribe>
<unsubscribe>users-unsubscribe@hudi.apache.org</unsubscribe>
</mailingList>
<mailingList>
<name>Commits Mailing List</name>
<post>commits@hudi.apache.org</post>
<subscribe>commits-subscribe@hudi.apache.org</subscribe>
<unsubscribe>commits-unsubscribe@hudi.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<!-- Set consoleOutput to true to see minor checkstyle issues -->
<consoleOutput>false</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<!-- NOTE: This property is only available in Maven >= 3.3.1 -->
<propertyExpansion>basedir=${maven.multiModuleProjectDirectory}</propertyExpansion>
<excludes>**\/generated-sources\/,org/apache/hudi/metaserver/thrift/*</excludes>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
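<!-- Example: the check goal above is bound to the compile phase, so a plain
       mvn compile
     fails on checkstyle violations at "warning" severity or above; the same check can be
     run directly with mvn checkstyle:check. -->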
<!--
See https://jira.apache.org/jira/browse/HUDI-304
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<version>1.24.3</version>
<configuration>
<java>
<eclipse>
<file>${main.basedir}/style/eclipse-java-google-style.xml</file>
<version>4.10.0</version>
</eclipse>
</java>
<scala>
<trimTrailingWhitespace />
</scala>
</configuration>
<executions>
<execution>
<id>spotless-check</id>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>2.5.3</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<useReleaseProfile>false</useReleaseProfile>
<releaseProfiles>release,integration-tests</releaseProfiles>
<goals>deploy</goals>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>${maven-deploy-plugin.version}</version>
<executions>
<execution>
<id>default-deploy</id>
<phase>deploy</phase>
<goals>
<goal>deploy</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>${maven-failsafe-plugin.version}</version>
<configuration>
<skip>${skipITs}</skip>
<argLine>@{argLine}</argLine>
<useSystemClassLoader>false</useSystemClassLoader>
<systemPropertyVariables>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>${maven-enforcer-plugin.version}</version>
<executions>
<execution>
<id>enforce-logging</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<bannedDependencies>
<excludes>
<exclude>org.slf4j:slf4j-simple</exclude>
<exclude>org.slf4j:slf4j-jdk14</exclude>
<exclude>org.slf4j:slf4j-nop</exclude>
<exclude>org.slf4j:slf4j-jcl</exclude>
<exclude>log4j:log4j</exclude>
<exclude>ch.qos.logback:logback-classic</exclude>
<!-- NOTE: We're banning any HBase dep versions other than the approved ${hbase.version},
to prevent classpath collisions w/ transitive deps -->
<exclude>org.apache.hbase:hbase-common:*</exclude>
<exclude>org.apache.hbase:hbase-client:*</exclude>
<exclude>org.apache.hbase:hbase-server:*</exclude>
<!-- Ban all snappy-java versions: releases before 1.1.8.2 do not work on M1 Macs -->
<exclude>org.xerial.snappy:snappy-java:*</exclude>
</excludes>
<includes>
<include>org.slf4j:slf4j-simple:*:*:test</include>
<include>org.apache.hbase:hbase-common:${hbase.version}</include>
<include>org.apache.hbase:hbase-client:${hbase.version}</include>
<include>org.apache.hbase:hbase-server:${hbase.version}</include>
<!-- Allow only ${snappy.version}: releases before 1.1.8.2 do not work on M1 Macs -->
<include>org.xerial.snappy:snappy-java:${snappy.version}</include>
</includes>
</bannedDependencies>
</rules>
</configuration>
</execution>
</executions>
</plugin>
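<!-- Example (hedged): under the bannedDependencies rule above, a module may only depend on
     HBase at the approved version, e.g. in a child POM:
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-client</artifactId>
         <version>${hbase.version}</version>
       </dependency>
     Any other hbase-client version, direct or transitive, fails the enforce goal. -->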
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>${maven-docker-plugin.version}</version>
<configuration>
<skip>${skipDocker}</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<configuration>
<!-- common to all bundles -->
<artifactSet>
<includes>
<include>org.apache.hudi:hudi-io</include>
<include>io.airlift:aircompressor</include>
<!-- org.apache.httpcomponents -->
<include>org.apache.httpcomponents:httpclient</include>
<include>org.apache.httpcomponents:httpcore</include>
<include>org.apache.httpcomponents:fluent-hc</include>
<!-- hbase -->
<include>org.apache.hbase:hbase-client</include>
<include>org.apache.hbase:hbase-common</include>
<include>org.apache.hbase:hbase-hadoop-compat</include>
<include>org.apache.hbase:hbase-hadoop2-compat</include>
<include>org.apache.hbase:hbase-metrics</include>
<include>org.apache.hbase:hbase-metrics-api</include>
<include>org.apache.hbase:hbase-protocol</include>
<include>org.apache.hbase:hbase-protocol-shaded</include>
<include>org.apache.hbase:hbase-server</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-miscellaneous</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-netty</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-protobuf</include>
<include>org.apache.hbase.thirdparty:hbase-unsafe</include>
<include>org.apache.htrace:htrace-core4</include>
<!-- afterburner module for jackson performance -->
<include>com.fasterxml.jackson.module:jackson-module-afterburner</include>
<!-- native HFile reader uses protobuf -->
<include>com.google.protobuf:protobuf-java</include>
</includes>
</artifactSet>
<relocations>
<!-- org.apache.httpcomponents -->
<relocation>
<pattern>org.apache.http.</pattern>
<shadedPattern>org.apache.hudi.org.apache.http.</shadedPattern>
</relocation>
<!-- hbase -->
<relocation>
<pattern>org.apache.hadoop.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.hbase.</shadedPattern>
<excludes>
<exclude>org.apache.hadoop.hbase.KeyValue$KeyComparator</exclude>
</excludes>
</relocation>
<relocation>
<pattern>org.apache.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hbase.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.htrace.</pattern>
<shadedPattern>org.apache.hudi.org.apache.htrace.</shadedPattern>
</relocation>
<!-- hbase
The classes below in the org.apache.hadoop.metrics2 package come from
hbase-hadoop-compat and hbase-hadoop2-compat and have to be shaded one by one,
rather than shading everything under org.apache.hadoop.metrics2, which would
also pull in the classes from hadoop itself. -->
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricsExecutor</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricsExecutor
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.impl.JmxCacheBuster</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.impl.JmxCacheBuster
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper</pattern>
<shadedPattern>
org.apache.hudi.org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MetricsExecutorImpl</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MetricsExecutorImpl
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableFastCounter</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableFastCounter
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableRangeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableRangeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableSizeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableSizeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableTimeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableTimeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricQuantile</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricQuantile
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricSampleQuantiles</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricSampleQuantiles
</shadedPattern>
</relocation>
<relocation>
<pattern>com.fasterxml.jackson.module</pattern>
<shadedPattern>org.apache.hudi.com.fasterxml.jackson.module
</shadedPattern>
</relocation>
<relocation>
<pattern>com.google.protobuf.</pattern>
<shadedPattern>org.apache.hudi.com.google.protobuf.</shadedPattern>
</relocation>
</relocations>
</configuration>
</plugin>
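<!-- Example: given the relocations above, a class bundled from httpclient such as
     org.apache.http.client.HttpClient is rewritten inside the bundle jars to
     org.apache.hudi.org.apache.http.client.HttpClient, so it cannot collide with an
     unshaded httpclient already present on the runtime classpath. -->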
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<rerunFailingTestsCount>3</rerunFailingTestsCount>
<argLine>@{argLine}</argLine>
<trimStackTrace>false</trimStackTrace>
<systemPropertyVariables>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
<forkedProcessExitTimeoutInSeconds>30</forkedProcessExitTimeoutInSeconds>
</configuration>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<version>2.2.0</version>
<configuration>
<skipTests>${skipUTs}</skipTests>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>TestSuite.txt</filereports>
<systemProperties>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemProperties>
</configuration>
<executions>
<execution>
<id>test</id>
<goals>
<goal>test</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
</plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala-maven-plugin.version}</version>
<configuration>
<checkMultipleScalaVersions>false</checkMultipleScalaVersions>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<!-- excludes are inherited -->
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache-rat-plugin.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<numUnapprovedLicenses>0</numUnapprovedLicenses>
<licenses>
<!-- Enforce this license:
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL2 </licenseFamilyCategory>
<licenseFamilyName>Apache License 2.0</licenseFamilyName>
<notes />
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF) under one</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>NOTICE</exclude>
<exclude>DISCLAIMER</exclude>
<exclude>**/.*</exclude>
<exclude>**/emptyFile</exclude>
<exclude>**/*.json</exclude>
<exclude>**/*.hfile</exclude>
<exclude>**/*.log</exclude>
<exclude>**/*.sqltemplate</exclude>
<exclude>**/compose_env</exclude>
<exclude>**/*NOTICE*</exclude>
<exclude>**/*LICENSE*</exclude>
<exclude>**/dependency-reduced-pom.xml</exclude>
<exclude>**/test/resources/*.data</exclude>
<exclude>**/test/resources/*.commit</exclude>
<exclude>**/test/resources/**/*.txt</exclude>
<exclude>**/test/resources/**/*.avsc</exclude>
<exclude>**/target/**</exclude>
<exclude>**/generated-sources/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/banner.txt</exclude>
<!-- local files not in version control -->
<exclude>**/*.iml</exclude>
<exclude>.mvn/**</exclude>
</excludes>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>schema</goal>
</goals>
<configuration>
<sourceDirectory>${project.basedir}/src/main/avro/</sourceDirectory>
<outputDirectory>${project.build.directory}/generated-sources/src/main/java/
</outputDirectory>
<stringType>String</stringType>
</configuration>
</execution>
</executions>
</plugin>
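<!-- Example (hedged, hypothetical schema): in a module enabling this plugin, a file
     src/main/avro/ExampleRecord.avsc such as
       {"type": "record", "name": "ExampleRecord", "namespace": "org.apache.hudi.example",
        "fields": [{"name": "id", "type": "string"}]}
     is compiled during generate-sources into Java classes under
     target/generated-sources/src/main/java/, with Avro strings mapped to java.lang.String
     per the stringType setting above. -->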
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
<version>1.0.0</version>
<configuration>
<verbose>false</verbose>
<failOnViolation>true</failOnViolation>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<failOnWarning>false</failOnWarning>
<sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
<testSourceDirectory>${project.basedir}/src/test/scala</testSourceDirectory>
<configLocation>${main.basedir}/style/scalastyle.xml</configLocation>
<outputEncoding>UTF-8</outputEncoding>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.github.os72</groupId>
<artifactId>protoc-jar-maven-plugin</artifactId>
<version>3.11.4</version>
<executions>
<execution>
<id>proto-compile</id>
<phase>generate-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<inputDirectories>
<include>src/main/resources</include>
</inputDirectories>
</configuration>
</execution>
<execution>
<id>proto-test-compile</id>
<phase>generate-test-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<addSources>test</addSources>
<inputDirectories>
<include>src/test/resources</include>
</inputDirectories>
</configuration>
</execution>
</executions>
<configuration>
<protocArtifact>com.google.protobuf:protoc:${proto.version}</protocArtifact>
<protocVersion>${protoc.version}</protocVersion>
<includeStdTypes>true</includeStdTypes>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencyManagement>
<dependencies>
<!-- Scala -->
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-collection-compat_${scala.binary.version}</artifactId>
<version>${scala.collection-compat.version}</version>
</dependency>
<dependency>
<groupId>org.openjdk.jol</groupId>
<artifactId>jol-core</artifactId>
<version>${openjdk.jol.version}</version>
</dependency>
<!-- Logging -->
<!-- NOTE: All the following deps have to have "provided" scope to make sure they don't conflict
w/ applications that use Hudi as a library. For ex, all Spark < 3.3 still relies on Log4j1,
and if we bring the Log4j2 bridge for V1 (log4j-1.2-api) onto the classpath it'll fail w/
`ClassNotFoundException`, since the bridge expects the Log4j2 impl to be present -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<!-- Fasterxml -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${fasterxml.jackson.databind.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-guava</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<!-- This one is necessary to support Java 8 Date/Time types (required for Jackson >= 2.13) -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
<version>${fasterxml.jackson.module.scala.version}</version>
</dependency>
<!-- Provides performance improvements for JSON serialization/deserialization -->
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-afterburner</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<!-- Glassfish -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.connectors</groupId>
<artifactId>jersey-apache-connector</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
<version>${glassfish.el.version}</version>
<scope>provided</scope>
</dependency>
<!-- Avro -->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
<scope>provided</scope>
</dependency>
<!-- airlift -->
<dependency>
<groupId>io.airlift</groupId>
<artifactId>aircompressor</artifactId>
<version>${airlift.version}</version>
</dependency>
<!-- Snappy -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>${snappy.version}</version>
</dependency>
<!-- caffeine -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>${caffeine.version}</version>
</dependency>
<!-- Parquet -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>${parquet.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Orc -->
<dependency>
<groupId>org.apache.orc</groupId>
<artifactId>orc-core</artifactId>
<version>${orc.spark.version}</version>
<scope>compile</scope>
</dependency>
<!-- RoaringBitmap -->
<dependency>
<groupId>org.roaringbitmap</groupId>
<artifactId>RoaringBitmap</artifactId>
<version>${roaringbitmap.version}</version>
</dependency>
<!-- Spark -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Flink -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.streaming.java.artifactId}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.clients.artifactId}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.connector.kafka.artifactId}</artifactId>
<version>${flink.connector.kafka.version}</version>
<scope>provided</scope>
</dependency>
<!-- Dropwizard Metrics -->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-graphite</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-jmx</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_httpserver</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_dropwizard</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_pushgateway</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>com.beust</groupId>
<artifactId>jcommander</artifactId>
<version>1.78</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${joda.version}</version>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.9.1</version>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
<version>2.7.3</version>
</dependency>
<dependency>
<groupId>org.rocksdb</groupId>
<artifactId>rocksdbjni</artifactId>
<version>${rocksdbjni.version}</version>
</dependency>
<!-- Httpcomponents -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>fluent-hc</artifactId>
<version>${http.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${http.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${http.version}</version>
</dependency>
<!-- Hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Hive -->
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-shims</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-serde</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
</exclusion>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>transaction-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<classifier>${hive.exec.classifier}</classifier>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${presto.version}</version>
</dependency>
<dependency>
<groupId>io.trino</groupId>
<artifactId>trino-jdbc</artifactId>
<version>${trino.version}</version>
</dependency>
<!-- Zookeeper -->
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${zk-curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<!-- Protobuf -->
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${proto.version}</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
<version>${proto.version}</version>
</dependency>
<!-- JUnit 5 -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit.vintage.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
<version>${mockito.jupiter.version}</version>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<!-- Kryo -->
<dependency>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
<version>${kryo.shaded.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<!-- Used to test execution in the task executor after deserialization -->
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo</artifactId>
<version>4.0.0</version>
<scope>test</scope>
</dependency>
<!-- Other Utils -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-test-utils_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.junit.jupiter</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Spring Boot -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${springboot.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.shell</groupId>
<artifactId>spring-shell-starter</artifactId>
<version>${spring.shell.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
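<!-- Example: child modules inherit the versions and scopes managed above without repeating
     them; a module that needs the Log4j2 API declares only
       <dependency>
         <groupId>org.apache.logging.log4j</groupId>
         <artifactId>log4j-api</artifactId>
       </dependency>
     and picks up version ${log4j2.version} and the "provided" scope from this section. -->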
<repositories>
<repository>
<id>Maven Central</id>
<name>Maven Repository</name>
<url>https://repo.maven.apache.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>cloudera-repo-releases</id>
<url>https://repository.cloudera.com/artifactory/public/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
</repositories>
<profiles>
<profile>
<id>release</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<doclint>none</doclint>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.4</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
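<!-- Example (hedged): this profile is activated by the deployArtifacts property, so
       mvn deploy -DdeployArtifacts=true
     attaches source and javadoc jars and GPG-signs the artifacts during verify; an
     explicit -Prelease activation works as well. -->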
<profile>
<id>warn-log</id>
<activation>
<property>
<name>env.HUDI_QUIETER_LOGGING</name>
</property>
</activation>
<properties>
<surefire-log4j.file>log4j2-surefire-quiet.properties</surefire-log4j.file>
</properties>
</profile>
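<!-- Example (hedged): activation property names prefixed with "env." are matched against
     environment variables, so setting HUDI_QUIETER_LOGGING to any value switches the test
     logging config, e.g.:
       HUDI_QUIETER_LOGGING=1 mvn test -->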
<profile>
<id>unit-tests</id>
<properties>
<skipUTs>false</skipUTs>
<skipFTs>true</skipFTs>
<skipITs>true</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration combine.self="append">
<skip>${skipUTs}</skip>
<forkedProcessExitTimeoutInSeconds>120</forkedProcessExitTimeoutInSeconds>
<excludedGroups>functional</excludedGroups>
<excludes>
<exclude>**/*FunctionalTestSuite.java</exclude>
<exclude>**/IT*.java</exclude>
<exclude>**/testsuite/**/Test*.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>post-unit-tests</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
<configuration>
<outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
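<!-- Example: run only unit tests, with functional and integration tests skipped:
       mvn test -Punit-tests -->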
<profile>
<id>functional-tests</id>
<properties>
<skipUTs>true</skipUTs>
<skipFTs>false</skipFTs>
<skipITs>true</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<dependencies>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
<artifactId>surefire-junit47</artifactId>
<version>${maven-surefire-plugin.version}</version>
</dependency>
</dependencies>
<configuration combine.self="append">
<skip>${skipFTs}</skip>
<forkCount>1</forkCount>
<reuseForks>true</reuseForks>
<includes>
<include>**/*FunctionalTestSuite.java</include>
</includes>
</configuration>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>post-functional-tests</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
<configuration>
<outputDirectory>${project.reporting.outputDirectory}/jacoco-ft</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
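<!-- Example: likewise selected by id, e.g.
  mvn test -Pfunctional-tests
which runs only the *FunctionalTestSuite classes and reports coverage to jacoco-ft. -->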
<profile>
<id>hudi-platform-service</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<modules>
<module>hudi-platform-service</module>
<module>packaging/hudi-metaserver-server-bundle</module>
</modules>
</profile>
<profile>
<id>integration-tests</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<modules>
<module>docker/hoodie/hadoop</module>
<module>hudi-integ-test</module>
<module>packaging/hudi-integ-test-bundle</module>
</modules>
<properties>
<skipUTs>true</skipUTs>
<skipFTs>true</skipFTs>
<skipITs>${skipTests}</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration combine.self="override">
<skip>${skipUTs}</skip>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration combine.self="override">
<skip>${skipITs}</skip>
<includes>
<include>**/IT*.java</include>
</includes>
<systemPropertyVariables>
<dynamodb-local.endpoint>${dynamodb-local.endpoint}</dynamodb-local.endpoint>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
<executions>
<execution>
<phase>integration-test</phase>
<goals>
<goal>integration-test</goal>
</goals>
</execution>
<execution>
<id>verify-integration-test</id>
<phase>verify</phase>
<goals>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
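<!-- Example: "integration-tests" also auto-activates with -DdeployArtifacts=true; an
explicit run (assumed invocation) is
  mvn verify -Pintegration-tests
Failsafe then executes the IT*.java classes during integration-test/verify, with
skipITs following the top-level skipTests property. -->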
<profile>
<id>javadocs</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala-maven-plugin.version}</version>
<executions>
<execution>
<id>doc</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<excludes>
<exclude>${project.basedir}/src/main/scala</exclude>
</excludes>
<checkMultipleScalaVersions>false</checkMultipleScalaVersions>
</configuration>
</execution>
</executions>
<configuration>
<args>
<arg>-P:genjavadoc:out=${project.build.directory}/genjavadoc</arg>
</args>
<compilerPlugins>
<compilerPlugin>
<groupId>com.typesafe.genjavadoc</groupId>
<artifactId>genjavadoc-plugin_${scala.version}</artifactId>
<version>${genjavadoc-plugin.version}</version>
</compilerPlugin>
</compilerPlugins>
<excludes>
<exclude>**/*.scala</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>${build-helper-maven-plugin.version}</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/genjavadoc</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>aggregate</id>
<goals>
<goal>aggregate</goal>
</goals>
</execution>
</executions>
<configuration>
<!-- Turn off the javadoc doclint for now, due to incomplete javadocs in the sources:
<doclint>all,-missing</doclint>
-->
<doclint>none</doclint>
<detectLinks>true</detectLinks>
<links>
<link>https://avro.apache.org/docs/${avro.version}/api/java</link>
<link>https://docs.spring.io/spring-shell/docs/1.2.0.RELEASE</link>
<link>https://fasterxml.github.io/jackson-databind/javadoc/2.6</link>
<link>https://hadoop.apache.org/docs/r${hadoop.version}/api</link>
<link>https://hbase.apache.org/2.4/apidocs</link>
<link>https://hive.apache.org/javadocs/r2.3.6/api</link>
<link>https://javadoc.io/static/io.javalin/javalin/2.3.0</link>
<link>https://javadoc.io/doc/org.apache.parquet/parquet-avro/${parquet.version}</link>
<link>https://javadoc.io/static/org.apache.parquet/parquet-hadoop/${parquet.version}</link>
<link>https://logging.apache.org/log4j/1.2/apidocs</link>
<link>https://metrics.dropwizard.io/4.1.0/apidocs</link>
<link>https://spark.apache.org/docs/${spark.version}/api/java</link>
</links>
<sourceFileExcludes>
<!--
Exclude generated Java files holding the static reference to the
singleton instance of a Scala object, to avoid redundancy in the javadoc
-->
<exclude>**/*$.java</exclude>
</sourceFileExcludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
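<!-- Example (assumed invocation): aggregated javadocs, including the genjavadoc stubs
generated for Scala sources above, can be built with something like
  mvn compile javadoc:aggregate -Pjavadocs
The compile phase is needed so generate-sources runs genjavadoc and build-helper
registers ${project.build.directory}/genjavadoc before javadoc aggregation. -->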
<profile>
<id>scala-2.11</id>
<properties>
<scala.version>${scala11.version}</scala.version>
<scala.binary.version>2.11</scala.binary.version>
<pulsar.spark.version>${pulsar.spark.scala11.version}</pulsar.spark.version>
</properties>
<activation>
<property>
<name>scala-2.11</name>
</property>
</activation>
</profile>
<profile>
<id>scala-2.12</id>
<properties>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
</properties>
<activation>
<property>
<name>scala-2.12</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>${maven-enforcer-plugin.version}</version>
<executions>
<execution>
<id>enforce-versions</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<bannedDependencies>
<excludes combine.children="append">
<exclude>*:*_2.11</exclude>
</excludes>
</bannedDependencies>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
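<!-- Example: the Scala profiles activate on a property of the same name, e.g.
  mvn clean install -Dscala-2.12
Activating scala-2.12 additionally bans any lingering *_2.11 dependencies via the
enforcer rule above. -->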
<!-- "spark2" is an alias of "spark2.4" -->
<!-- NOTE: This profile is deprecated and soon will be removed -->
<profile>
<id>spark2</id>
<modules>
<module>hudi-spark-datasource/hudi-spark2</module>
<module>hudi-spark-datasource/hudi-spark2-common</module>
</modules>
<properties>
<spark.version>${spark2.version}</spark.version>
<sparkbundle.version/>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark2</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark2-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>2.0.0</kafka.version>
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.6.0</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.7</antlr.version>
<fasterxml.version>2.6.7</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>2.6.7.1</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>2.7.4</fasterxml.jackson.dataformat.yaml.version>
<skip.hudi-spark3.unit.tests>true</skip.hudi-spark3.unit.tests>
<skipITs>true</skipITs>
</properties>
<activation>
<property>
<name>spark2</name>
</property>
</activation>
</profile>
<profile>
<id>spark2.4</id>
<modules>
<module>hudi-spark-datasource/hudi-spark2</module>
<module>hudi-spark-datasource/hudi-spark2-common</module>
</modules>
<properties>
<spark.version>${spark2.version}</spark.version>
<sparkbundle.version>2.4</sparkbundle.version>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark2</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark2-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>2.0.0</kafka.version>
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.6.0</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.7</antlr.version>
<fasterxml.version>2.6.7</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>2.6.7.1</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>2.7.4</fasterxml.jackson.dataformat.yaml.version>
<skip.hudi-spark3.unit.tests>true</skip.hudi-spark3.unit.tests>
<skipITs>false</skipITs>
</properties>
<activation>
<property>
<name>spark2.4</name>
</property>
</activation>
</profile>
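<!-- Example: the Spark profiles are property-activated in the same way, e.g.
  mvn clean install -Dspark2.4
selects the hudi-spark2 modules and the Spark 2.4 dependency set above. -->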
<profile>
<id>m1-mac</id>
<properties>
<spark2.version>2.4.8</spark2.version>
</properties>
<activation>
<os>
<family>mac</family>
<arch>aarch64</arch>
</os>
</activation>
</profile>
<!-- "spark3" is an alias for "spark3.4" -->
<!-- NOTE: This profile is deprecated and soon will be removed -->
<profile>
<id>spark3</id>
<properties>
<spark3.version>${spark35.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3</sparkbundle.version>
<scala12.version>2.12.18</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.5.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<hive.storage.version>2.8.1</hive.storage.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.13.1</parquet.version>
<orc.spark.version>1.9.1</orc.spark.version>
<avro.version>1.11.2</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.15.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.20.0</log4j2.version>
<slf4j.version>2.0.7</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.5.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-storage-api</artifactId>
<version>${hive.storage.version}</version>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3</name>
</property>
</activation>
</profile>
<profile>
<id>spark3.0</id>
<properties>
<spark3.version>${spark30.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.0</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.0.x</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.5.13</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.8-1</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.0.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
</modules>
<activation>
<property>
<name>spark3.0</name>
</property>
</activation>
</profile>
<profile>
<id>spark3.1</id>
<properties>
<spark3.version>${spark31.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.1</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.1.x</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.5.13</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.8-1</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.1.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
</modules>
<activation>
<property>
<name>spark3.1</name>
</property>
</activation>
</profile>
<profile>
<id>spark3.2</id>
<properties>
<spark3.version>${spark32.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.2</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.2.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.12.2</parquet.version>
<orc.spark.version>1.6.12</orc.spark.version>
<avro.version>1.10.2</avro.version>
<antlr.version>4.8</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.2.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<activation>
<activeByDefault>true</activeByDefault>
<property>
<name>spark3.2</name>
</property>
</activation>
</profile>
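<!-- Note (standard Maven semantics): spark3.2 is marked activeByDefault, so it applies
whenever no other profile in this POM is activated on the command line; passing e.g.
-Dspark3.5 switches the build to that profile instead. -->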
<profile>
<id>spark3.3</id>
<properties>
<spark3.version>${spark33.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.3</sparkbundle.version>
<scala12.version>2.12.15</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.3.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.12.2</parquet.version>
<orc.spark.version>1.7.8</orc.spark.version>
<avro.version>1.11.1</avro.version>
<antlr.version>4.8</antlr.version>
<fasterxml.spark3.version>2.13.3</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.3.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<activation>
<property>
<name>spark3.3</name>
</property>
</activation>
</profile>
<profile>
<id>spark3.4</id>
<properties>
<spark3.version>${spark34.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.4</sparkbundle.version>
<scala12.version>2.12.17</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.4.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.12.3</parquet.version>
<orc.spark.version>1.8.3</orc.spark.version>
<avro.version>1.11.1</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.14.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.19.0</log4j2.version>
<slf4j.version>2.0.6</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.4.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3.4</name>
</property>
</activation>
</profile>
<profile>
<id>spark3.5</id>
<properties>
<spark3.version>${spark35.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.5</sparkbundle.version>
<scala12.version>2.12.18</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.5.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<hive.storage.version>2.8.1</hive.storage.version>
<!-- NOTE: Some Hudi modules require a standalone Parquet/ORC/etc. file-format dependency
(for example, hudi-hive-sync and hudi-hadoop-mr). Since these modules might be used from
within execution engines that also bring these file formats as dependencies, the
versions need to be kept in sync to avoid classpath ambiguity -->
<parquet.version>1.13.1</parquet.version>
<orc.spark.version>1.9.1</orc.spark.version>
<avro.version>1.11.2</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.15.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.20.0</log4j2.version>
<slf4j.version>2.0.7</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.5.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-storage-api</artifactId>
<version>${hive.storage.version}</version>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3.5</name>
</property>
</activation>
</profile>
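<!-- Example (assumed invocation): a Spark 3.5 / Scala 2.12 build of the Spark bundle
would typically look like
  mvn clean package -DskipTests -Dspark3.5 -Dscala-2.12
with the resulting bundle jar under packaging/hudi-spark-bundle/target (the exact
artifact name is determined by the bundle module's configuration, not shown here). -->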
<profile>
<id>flink1.18</id>
<properties>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.13.1</flink.format.parquet.version>
</properties>
<activation>
<property>
<name>flink1.18</name>
</property>
</activation>
</profile>
<profile>
<id>flink1.17</id>
<properties>
<flink.version>${flink1.17.version}</flink.version>
<hudi.flink.module>hudi-flink1.17.x</hudi.flink.module>
<flink.bundle.version>1.17</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.3</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.17.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.17</name>
</property>
</activation>
</profile>
<profile>
<id>flink1.16</id>
<properties>
<flink.version>${flink1.16.version}</flink.version>
<hudi.flink.module>hudi-flink1.16.x</hudi.flink.module>
<flink.bundle.version>1.16</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.16.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.16</name>
</property>
</activation>
</profile>
<profile>
<id>flink1.15</id>
<properties>
<flink.version>${flink1.15.version}</flink.version>
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
<flink.bundle.version>1.15</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.15.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.15</name>
</property>
</activation>
</profile>
<profile>
<id>flink1.14</id>
<properties>
<flink.version>${flink1.14.version}</flink.version>
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
<flink.bundle.version>1.14</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet_${scala.binary.version}</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb_${scala.binary.version}</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils_${scala.binary.version}</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java_${scala.binary.version}</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
<flink.format.parquet.version>1.11.1</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.14.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.14</name>
</property>
</activation>
</profile>
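<!-- Example: the Flink profiles above follow the same property-activation pattern, e.g.
  mvn clean package -DskipTests -Dflink1.17
selects hudi-flink1.17.x and the matching connector/format versions. -->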
<profile>
<id>skipShadeSources</id>
<properties>
<shadeSources>false</shadeSources>
</properties>
<activation>
<property>
<name>skipShadeSources</name>
</property>
</activation>
</profile>
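<!-- Note (assumption): passing -DskipShadeSources flips ${shadeSources} to false,
presumably consumed by the bundle modules' shade configuration to skip attaching
shaded-sources jars; the consuming configuration lives outside this file. -->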
<profile>
<id>java17</id>
<properties>
<argLine>-Xmx2g --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djol.magicFieldOffset=true</argLine>
</properties>
<activation>
<property>
<name>java17</name>
</property>
</activation>
</profile>
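<!-- Example: on JDK 17 the add-opens flags above are required by the test forks, e.g.
  mvn test -Punit-tests -Djava17
so that surefire/failsafe pick up the argLine when forking test JVMs. -->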
</profiles>
</project>