<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<!-- Inherit ASF release and distribution conventions from the Apache parent POM. -->
<parent>
  <groupId>org.apache</groupId>
  <artifactId>apache</artifactId>
  <version>21</version>
</parent>

<!-- Aggregator POM: this module builds no artifact itself (packaging=pom). -->
<groupId>org.apache.hudi</groupId>
<artifactId>hudi</artifactId>
<packaging>pom</packaging>
<version>1.0.0-SNAPSHOT</version>
<description>Apache Hudi brings stream style processing on big data</description>
<url>https://github.com/apache/hudi</url>
<name>Hudi</name>
<!-- Reactor modules. Entries under packaging/ are shaded distribution bundles
     built from the corresponding library modules above them. -->
<modules>
  <module>hudi-common</module>
  <module>hudi-cli</module>
  <module>hudi-client</module>
  <module>hudi-aws</module>
  <module>hudi-gcp</module>
  <module>hudi-hadoop-common</module>
  <module>hudi-hadoop-mr</module>
  <module>hudi-io</module>
  <module>hudi-spark-datasource</module>
  <module>hudi-timeline-service</module>
  <module>hudi-utilities</module>
  <module>hudi-sync</module>
  <module>packaging/hudi-hadoop-mr-bundle</module>
  <module>packaging/hudi-datahub-sync-bundle</module>
  <module>packaging/hudi-hive-sync-bundle</module>
  <module>packaging/hudi-aws-bundle</module>
  <module>packaging/hudi-gcp-bundle</module>
  <module>packaging/hudi-spark-bundle</module>
  <module>packaging/hudi-presto-bundle</module>
  <module>packaging/hudi-utilities-bundle</module>
  <module>packaging/hudi-utilities-slim-bundle</module>
  <module>packaging/hudi-timeline-server-bundle</module>
  <module>packaging/hudi-trino-bundle</module>
  <module>hudi-examples</module>
  <module>hudi-flink-datasource</module>
  <module>hudi-kafka-connect</module>
  <module>packaging/hudi-flink-bundle</module>
  <module>packaging/hudi-kafka-connect-bundle</module>
  <module>packaging/hudi-cli-bundle</module>
  <module>hudi-tests-common</module>
</modules>
<!-- Project licensing and sponsoring organization metadata. -->
<licenses>
  <license>
    <name>Apache License, Version 2.0</name>
    <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
    <distribution>repo</distribution>
  </license>
</licenses>
<organization>
  <name>The Apache Software Foundation</name>
  <url>https://www.apache.org</url>
</organization>
<properties>
<!-- ======================= Maven plugin versions ======================= -->
<maven-jar-plugin.version>3.2.0</maven-jar-plugin.version>
<maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
<maven-failsafe-plugin.version>2.22.2</maven-failsafe-plugin.version>
<maven-shade-plugin.version>3.4.0</maven-shade-plugin.version>
<maven-javadoc-plugin.version>3.1.1</maven-javadoc-plugin.version>
<maven-compiler-plugin.version>3.8.0</maven-compiler-plugin.version>
<maven-deploy-plugin.version>2.4</maven-deploy-plugin.version>
<genjavadoc-plugin.version>0.15</genjavadoc-plugin.version>
<build-helper-maven-plugin.version>1.7</build-helper-maven-plugin.version>
<maven-enforcer-plugin.version>3.0.0-M1</maven-enforcer-plugin.version>
<maven-docker-plugin.version>0.42.1</maven-docker-plugin.version>
<java.version>1.8</java.version>
<!-- ============ Serialization / Jackson (tracks Spark 3) =============== -->
<kryo.shaded.version>4.0.2</kryo.shaded.version>
<fasterxml.spark3.version>2.10.0</fasterxml.spark3.version>
<!-- All Jackson artifacts below are pinned to the Spark 3 Jackson line. -->
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<!-- ==================== Messaging / ingestion sources ================== -->
<kafka.version>2.0.0</kafka.version>
<kafka.spark3.version>2.8.0</kafka.spark3.version>
<pulsar.version>3.0.2</pulsar.version>
<!-- Default pulsar-spark connector follows the Scala 2.12 build. -->
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<pulsar.spark.scala11.version>2.4.5</pulsar.spark.scala11.version>
<pulsar.spark.scala12.version>3.1.1.4</pulsar.spark.scala12.version>
<confluent.version>5.3.4</confluent.version>
<!-- ======================= Storage / formats =========================== -->
<glassfish.version>2.17</glassfish.version>
<glassfish.el.version>3.0.1-b12</glassfish.el.version>
<parquet.version>1.10.1</parquet.version>
<!-- ========================= Test frameworks =========================== -->
<junit.jupiter.version>5.7.2</junit.jupiter.version>
<junit.vintage.version>5.7.2</junit.vintage.version>
<junit.platform.version>1.7.2</junit.platform.version>
<mockito.jupiter.version>3.3.3</mockito.jupiter.version>
<!-- ============================ Logging ================================ -->
<log4j2.version>2.17.2</log4j2.version>
<slf4j.version>1.7.36</slf4j.version>
<joda.version>2.9.9</joda.version>
<!-- ====================== Hadoop / Hive / engines ====================== -->
<hadoop.version>2.10.2</hadoop.version>
<hive.groupid>org.apache.hive</hive.groupid>
<hive.version>2.3.1</hive.version>
<hive.parquet.version>1.10.1</hive.parquet.version>
<hive.avro.version>1.8.2</hive.avro.version>
<presto.version>0.273</presto.version>
<trino.version>390</trino.version>
<hive.exec.classifier>core</hive.exec.classifier>
<metrics.version>4.1.1</metrics.version>
<orc.spark.version>1.6.0</orc.spark.version>
<orc.flink.version>1.5.6</orc.flink.version>
<roaringbitmap.version>0.9.47</roaringbitmap.version>
<airlift.version>0.25</airlift.version>
<prometheus.version>0.8.0</prometheus.version>
<aws.sdk.httpclient.version>4.5.13</aws.sdk.httpclient.version>
<aws.sdk.httpcore.version>4.4.13</aws.sdk.httpcore.version>
<http.version>4.4.1</http.version>
<!-- ============================= Spark ================================= -->
<!-- Default Spark is the Spark 3 line; profiles elsewhere presumably switch
     these — confirm against the profile section (not visible in this chunk). -->
<spark.version>${spark3.version}</spark.version>
<spark2.version>2.4.4</spark2.version>
<spark3.version>3.4.1</spark3.version>
<sparkbundle.version></sparkbundle.version>
<!-- ============================= Flink ================================= -->
<flink1.18.version>1.18.0</flink1.18.version>
<flink1.17.version>1.17.1</flink1.17.version>
<flink1.16.version>1.16.2</flink1.16.version>
<flink1.15.version>1.15.1</flink1.15.version>
<flink1.14.version>1.14.5</flink1.14.version>
<!-- Default Flink build targets 1.18. -->
<flink.version>${flink1.18.version}</flink.version>
<hudi.flink.module>hudi-flink1.18.x</hudi.flink.module>
<flink.bundle.version>1.18</flink.bundle.version>
<!-- This is fixed to match with version from flink-avro -->
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.13.1</flink.format.parquet.version>
<flink.connector.kafka.version>3.0.0-1.17</flink.connector.kafka.version>
<!-- Flink artifactIds are properties so version profiles can swap Scala-suffixed
     artifacts in and out. -->
<flink.runtime.artifactId>flink-runtime</flink.runtime.artifactId>
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_2.12</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
<rocksdbjni.version>7.5.3</rocksdbjni.version>
<!-- Per-minor-version Spark pins used by build profiles. -->
<spark30.version>3.0.2</spark30.version>
<spark31.version>3.1.3</spark31.version>
<spark32.version>3.2.3</spark32.version>
<spark33.version>3.3.1</spark33.version>
<spark34.version>3.4.1</spark34.version>
<spark35.version>3.5.0</spark35.version>
<hudi.spark.module>hudi-spark3.2.x</hudi.spark.module>
<!-- NOTE: Different Spark versions might require different number of shared
modules being incorporated, hence we're creating multiple placeholders
(hudi.spark.common.modules.*) -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<!-- ========================= Scala / misc libs ========================= -->
<avro.version>1.8.2</avro.version>
<caffeine.version>2.9.1</caffeine.version>
<commons.io.version>2.11.0</commons.io.version>
<scala11.version>2.11.12</scala11.version>
<scala12.version>2.12.10</scala12.version>
<!-- Default Scala is 2.12. -->
<scala.version>${scala12.version}</scala.version>
<scala.collection-compat.version>2.8.1</scala.collection-compat.version>
<scala.binary.version>2.12</scala.binary.version>
<apache-rat-plugin.version>0.13</apache-rat-plugin.version>
<scala-maven-plugin.version>3.3.1</scala-maven-plugin.version>
<scalatest.spark_pre31.version>3.0.1</scalatest.spark_pre31.version>
<scalatest.spark3.version>3.1.0</scalatest.spark3.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<surefire-log4j.file>log4j2-surefire.properties</surefire-log4j.file>
<thrift.version>0.13.0</thrift.version>
<javalin.version>4.6.7</javalin.version>
<jetty.version>9.4.53.v20231009</jetty.version>
<htrace.version>3.1.0-incubating</htrace.version>
<hbase.version>2.4.13</hbase.version>
<h2.version>1.4.199</h2.version>
<awaitility.version>3.1.2</awaitility.version>
<!-- ====================== Test-skipping switches ======================= -->
<!-- skipTests cascades into every per-category skip flag below. -->
<skipTests>false</skipTests>
<skipUTs>${skipTests}</skipUTs>
<skipFTs>${skipTests}</skipFTs>
<skipITs>${skipTests}</skipITs>
<skip.hudi-spark2.unit.tests>${skipTests}</skip.hudi-spark2.unit.tests>
<skip.hudi-spark3.unit.tests>${skipTests}</skip.hudi-spark3.unit.tests>
<skipDocker>${skipTests}</skipDocker>
<!-- ====================== Build / bundle settings ====================== -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<main.basedir>${project.basedir}</main.basedir>
<spark.bundle.hive.scope>provided</spark.bundle.hive.scope>
<!-- Empty shade prefixes mean "do not relocate Hive classes" by default. -->
<spark.bundle.hive.shade.prefix/>
<utilities.bundle.hive.scope>provided</utilities.bundle.hive.scope>
<utilities.bundle.hive.shade.prefix/>
<argLine>-Xmx2g -Xms128m</argLine>
<jacoco.version>0.8.8</jacoco.version>
<presto.bundle.bootstrap.scope>compile</presto.bundle.bootstrap.scope>
<presto.bundle.bootstrap.shade.prefix>org.apache.hudi.</presto.bundle.bootstrap.shade.prefix>
<trino.bundle.bootstrap.scope>compile</trino.bundle.bootstrap.scope>
<trino.bundle.bootstrap.shade.prefix>org.apache.hudi.</trino.bundle.bootstrap.shade.prefix>
<shadeSources>true</shadeSources>
<zk-curator.version>2.7.1</zk-curator.version>
<disruptor.version>3.4.2</disruptor.version>
<antlr.version>4.8</antlr.version>
<aws.sdk.version>2.18.40</aws.sdk.version>
<proto.version>3.21.7</proto.version>
<protoc.version>3.21.5</protoc.version>
<dynamodb.lockclient.version>1.2.0</dynamodb.lockclient.version>
<zookeeper.version>3.5.7</zookeeper.version>
<openjdk.jol.version>0.16</openjdk.jol.version>
<google.cloud.pubsub.version>1.120.0</google.cloud.pubsub.version>
<gcs.connector.version>hadoop2-2.2.7</gcs.connector.version>
<!-- Local service endpoints used by tests (DynamoDB Local, moto). -->
<dynamodb-local.port>8000</dynamodb-local.port>
<dynamodb-local.endpoint>http://localhost:${dynamodb-local.port}</dynamodb-local.endpoint>
<moto.port>5000</moto.port>
<moto.endpoint>http://localhost:${moto.port}</moto.endpoint>
<springboot.version>2.7.3</springboot.version>
<spring.shell.version>2.1.1</spring.shell.version>
<snappy.version>1.1.8.3</snappy.version>
</properties>
<!-- Source control, issue tracker, and community mailing-list metadata. -->
<scm>
  <connection>scm:git:git@github.com:apache/hudi.git</connection>
  <developerConnection>scm:git:git@github.com:apache/hudi.git</developerConnection>
  <url>git@github.com:apache/hudi.git</url>
  <tag>HEAD</tag>
</scm>
<issueManagement>
  <system>JIRA</system>
  <url>https://issues.apache.org/jira/browse/HUDI</url>
</issueManagement>
<mailingLists>
  <mailingList>
    <name>Dev Mailing List</name>
    <post>dev@hudi.apache.org</post>
    <subscribe>dev-subscribe@hudi.apache.org</subscribe>
    <unsubscribe>dev-unsubscribe@hudi.apache.org</unsubscribe>
  </mailingList>
  <mailingList>
    <name>User Mailing List</name>
    <post>users@hudi.apache.org</post>
    <subscribe>users-subscribe@hudi.apache.org</subscribe>
    <unsubscribe>users-unsubscribe@hudi.apache.org</unsubscribe>
  </mailingList>
  <mailingList>
    <name>Commits Mailing List</name>
    <post>commits@hudi.apache.org</post>
    <subscribe>commits-subscribe@hudi.apache.org</subscribe>
    <unsubscribe>commits-unsubscribe@hudi.apache.org</unsubscribe>
  </mailingList>
</mailingLists>
<build>
<plugins>
<!-- Attach a -sources jar to every module (jar-no-fork avoids re-running
     the generate-sources phase). -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-source-plugin</artifactId>
  <version>2.2.1</version>
  <executions>
    <execution>
      <id>attach-sources</id>
      <goals>
        <goal>jar-no-fork</goal>
      </goals>
    </execution>
  </executions>
</plugin>
<!-- Checkstyle runs at compile and fails the build on warnings and above,
     using the rules under style/. -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-checkstyle-plugin</artifactId>
  <version>3.1.0</version>
  <dependencies>
    <!-- Pin the checkstyle engine itself, independent of the plugin version. -->
    <dependency>
      <groupId>com.puppycrawl.tools</groupId>
      <artifactId>checkstyle</artifactId>
      <version>8.18</version>
    </dependency>
  </dependencies>
  <configuration>
    <!-- Set consoleOutput to true to see minor checkstyle issues -->
    <consoleOutput>false</consoleOutput>
    <encoding>UTF-8</encoding>
    <configLocation>style/checkstyle.xml</configLocation>
    <suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
    <suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
    <failOnViolation>true</failOnViolation>
    <violationSeverity>warning</violationSeverity>
    <includeTestSourceDirectory>true</includeTestSourceDirectory>
    <sourceDirectories>
      <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
    </sourceDirectories>
    <!-- NOTE: This property is only available in Maven >= 3.3.1 -->
    <propertyExpansion>basedir=${maven.multiModuleProjectDirectory}</propertyExpansion>
    <!-- NOTE(review): the backslash-escaped slashes below look regex-style
         rather than ant-glob style — confirm the patterns actually match. -->
    <excludes>**\/generated-sources\/,org/apache/hudi/metaserver/thrift/*</excludes>
  </configuration>
  <executions>
    <execution>
      <phase>compile</phase>
      <goals>
        <goal>check</goal>
      </goals>
    </execution>
  </executions>
</plugin>
<!--
See https://jira.apache.org/jira/browse/HUDI-304
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<version>1.24.3</version>
<configuration>
<java>
<eclipse>
<file>${main.basedir}/style/eclipse-java-google-style.xml</file>
<version>4.10.0</version>
</eclipse>
</java>
<scala>
<trimTrailingWhitespace />
</scala>
</configuration>
<executions>
<execution>
<id>spotless-check</id>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
-->
<!-- Compile for the Java release declared in ${java.version} (1.8). -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-compiler-plugin</artifactId>
  <version>${maven-compiler-plugin.version}</version>
  <configuration>
    <source>${java.version}</source>
    <target>${java.version}</target>
  </configuration>
</plugin>
<!-- Release process: version all submodules together and deploy under the
     release,integration-tests profiles instead of the default profile. -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-release-plugin</artifactId>
  <version>2.5.3</version>
  <configuration>
    <autoVersionSubmodules>true</autoVersionSubmodules>
    <useReleaseProfile>false</useReleaseProfile>
    <releaseProfiles>release,integration-tests</releaseProfiles>
    <goals>deploy</goals>
  </configuration>
</plugin>
<!-- Explicitly bind the default deploy goal to the deploy phase. -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-deploy-plugin</artifactId>
  <version>${maven-deploy-plugin.version}</version>
  <executions>
    <execution>
      <id>default-deploy</id>
      <phase>deploy</phase>
      <goals>
        <goal>deploy</goal>
      </goals>
    </execution>
  </executions>
</plugin>
<!-- Integration tests (failsafe), skippable via -DskipITs; reuses the shared
     ${argLine} JVM options and the surefire log4j2 config file. -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-failsafe-plugin</artifactId>
  <version>${maven-failsafe-plugin.version}</version>
  <configuration>
    <skip>${skipITs}</skip>
    <argLine>@{argLine}</argLine>
    <useSystemClassLoader>false</useSystemClassLoader>
    <systemPropertyVariables>
      <log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
    </systemPropertyVariables>
  </configuration>
</plugin>
<!-- Enforcer rules: ban alternate SLF4J bindings, log4j 1.x, and logback, and
     pin HBase and snappy-java to the project-approved versions. -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-enforcer-plugin</artifactId>
  <version>${maven-enforcer-plugin.version}</version>
  <executions>
    <execution>
      <id>enforce-logging</id>
      <goals>
        <goal>enforce</goal>
      </goals>
      <configuration>
        <rules>
          <bannedDependencies>
            <excludes>
              <!-- FIX: the groupId was misspelled "org.sl4fj", so these four bans
                   could never match a real artifact. SLF4J's groupId is
                   "org.slf4j" (as the <includes> below already spell it). -->
              <exclude>org.slf4j:slf4j-simple</exclude>
              <exclude>org.slf4j:slf4j-jdk14</exclude>
              <exclude>org.slf4j:slf4j-nop</exclude>
              <exclude>org.slf4j:slf4j-jcl</exclude>
              <exclude>log4j:log4j</exclude>
              <exclude>ch.qos.logback:logback-classic</exclude>
              <!-- NOTE: We're banning any HBase deps versions other than the approved ${hbase.version},
                   which is aimed at preventing the classpath collisions w/ transitive deps usually) -->
              <exclude>org.apache.hbase:hbase-common:*</exclude>
              <exclude>org.apache.hbase:hbase-client:*</exclude>
              <exclude>org.apache.hbase:hbase-server:*</exclude>
              <!--To upgrade snappy because pre 1.1.8.2 does not work on m1 mac-->
              <exclude>org.xerial.snappy:snappy-java:*</exclude>
            </excludes>
            <includes>
              <!-- slf4j-simple stays allowed in test scope only. -->
              <include>org.slf4j:slf4j-simple:*:*:test</include>
              <include>org.apache.hbase:hbase-common:${hbase.version}</include>
              <include>org.apache.hbase:hbase-client:${hbase.version}</include>
              <include>org.apache.hbase:hbase-server:${hbase.version}</include>
              <!--To upgrade snappy because pre 1.1.8.2 does not work on m1 mac-->
              <include>org.xerial.snappy:snappy-java:${snappy.version}</include>
            </includes>
          </bannedDependencies>
        </rules>
      </configuration>
    </execution>
  </executions>
</plugin>
<!-- Code-coverage instrumentation (JaCoCo); executions are configured elsewhere. -->
<plugin>
  <groupId>org.jacoco</groupId>
  <artifactId>jacoco-maven-plugin</artifactId>
  <version>${jacoco.version}</version>
</plugin>
<!-- Docker-based builds/tests; skipped whenever tests are skipped (${skipDocker}
     defaults to ${skipTests}). -->
<plugin>
  <groupId>io.fabric8</groupId>
  <artifactId>docker-maven-plugin</artifactId>
  <version>${maven-docker-plugin.version}</version>
  <configuration>
    <skip>${skipDocker}</skip>
  </configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<configuration>
<!-- common to all bundles -->
<artifactSet>
<includes>
<include>org.apache.hudi:hudi-io</include>
<include>io.airlift:aircompressor</include>
<!-- org.apache.httpcomponents -->
<include>org.apache.httpcomponents:httpclient</include>
<include>org.apache.httpcomponents:httpcore</include>
<include>org.apache.httpcomponents:fluent-hc</include>
<!-- hbase -->
<include>org.apache.hbase:hbase-client</include>
<include>org.apache.hbase:hbase-common</include>
<include>org.apache.hbase:hbase-hadoop-compat</include>
<include>org.apache.hbase:hbase-hadoop2-compat</include>
<include>org.apache.hbase:hbase-metrics</include>
<include>org.apache.hbase:hbase-metrics-api</include>
<include>org.apache.hbase:hbase-protocol</include>
<include>org.apache.hbase:hbase-protocol-shaded</include>
<include>org.apache.hbase:hbase-server</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-miscellaneous</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-netty</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-protobuf</include>
<include>org.apache.hbase.thirdparty:hbase-unsafe</include>
<include>org.apache.htrace:htrace-core4</include>
<!-- afterburner module for jackson performance -->
<include>com.fasterxml.jackson.module:jackson-module-afterburner</include>
<!-- native HFile reader uses protobuf -->
<include>com.google.protobuf:protobuf-java</include>
</includes>
</artifactSet>
<relocations>
<!-- NOTE: the trailing "." on each package pattern anchors the match to the exact
package prefix, so e.g. "org.apache.http." cannot also capture other packages
that merely share the same stem. -->
<!-- org.apache.httpcomponents -->
<relocation>
<pattern>org.apache.http.</pattern>
<shadedPattern>org.apache.hudi.org.apache.http.</shadedPattern>
</relocation>
<!-- hbase -->
<relocation>
<pattern>org.apache.hadoop.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.hbase.</shadedPattern>
<excludes>
<!-- NOTE(review): kept under its original (unshaded) name - presumably this class
is referenced by its exact name at runtime; confirm before removing this exclude. -->
<exclude>org.apache.hadoop.hbase.KeyValue$KeyComparator</exclude>
</excludes>
</relocation>
<relocation>
<pattern>org.apache.hbase.</pattern>
<shadedPattern>org.apache.hudi.org.apache.hbase.</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.htrace.</pattern>
<shadedPattern>org.apache.hudi.org.apache.htrace.</shadedPattern>
</relocation>
<!-- hbase
The classes below in org.apache.hadoop.metrics2 package come from
hbase-hadoop-compat and hbase-hadoop2-compat, which have to be shaded one by one,
instead of shading all classes under org.apache.hadoop.metrics2 including ones
from hadoop. -->
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.MetricsExecutor</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.MetricsExecutor
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.impl.JmxCacheBuster</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.impl.JmxCacheBuster
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper</pattern>
<shadedPattern>
org.apache.hudi.org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MetricsExecutorImpl</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MetricsExecutorImpl
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableFastCounter</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableFastCounter
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableRangeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableRangeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableSizeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableSizeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.lib.MutableTimeHistogram</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.lib.MutableTimeHistogram
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricQuantile</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricQuantile
</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.hadoop.metrics2.util.MetricSampleQuantiles</pattern>
<shadedPattern>org.apache.hudi.org.apache.hadoop.metrics2.util.MetricSampleQuantiles
</shadedPattern>
</relocation>
<relocation>
<pattern>com.fasterxml.jackson.module</pattern>
<shadedPattern>org.apache.hudi.com.fasterxml.jackson.module
</shadedPattern>
</relocation>
<relocation>
<pattern>com.google.protobuf.</pattern>
<shadedPattern>org.apache.hudi.com.google.protobuf.</shadedPattern>
</relocation>
</relocations>
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<!-- retry failing (flaky) tests up to 3 times before reporting failure -->
<rerunFailingTestsCount>3</rerunFailingTestsCount>
<!-- "@{...}" is late-replaced at execution time so arguments contributed by other
plugins (presumably a coverage agent setting "argLine") are preserved -
TODO confirm which plugin defines the argLine property -->
<argLine>@{argLine}</argLine>
<trimStackTrace>false</trimStackTrace>
<systemPropertyVariables>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
<forkedProcessExitTimeoutInSeconds>30</forkedProcessExitTimeoutInSeconds>
</configuration>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<!-- NOTE(review): version hardcoded; most plugins here use a version property - consider one for consistency -->
<version>2.2.0</version>
<configuration>
<skipTests>${skipUTs}</skipTests>
<!-- write reports next to surefire's so tooling finds them in one place -->
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>TestSuite.txt</filereports>
<systemProperties>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemProperties>
</configuration>
<executions>
<execution>
<id>test</id>
<goals>
<goal>test</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
</plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala-maven-plugin.version}</version>
<configuration>
<checkMultipleScalaVersions>false</checkMultipleScalaVersions>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
<plugin>
<!-- excludes are inherited -->
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache-rat-plugin.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<!-- fail the build on any file missing an approved license header -->
<numUnapprovedLicenses>0</numUnapprovedLicenses>
<licenses>
<!-- Enforce this license:
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<!-- NOTE: the trailing space in the category appears deliberate (rat pads
category names) - do not "clean it up" without checking rat's matching -->
<licenseFamilyCategory>AL2 </licenseFamilyCategory>
<licenseFamilyName>Apache License 2.0</licenseFamilyName>
<notes />
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF) under one</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>NOTICE</exclude>
<exclude>DISCLAIMER</exclude>
<exclude>**/.*</exclude>
<exclude>**/emptyFile</exclude>
<exclude>**/*.json</exclude>
<exclude>**/*.hfile</exclude>
<exclude>**/*.log</exclude>
<exclude>**/*.sqltemplate</exclude>
<exclude>**/compose_env</exclude>
<exclude>**/*NOTICE*</exclude>
<exclude>**/*LICENSE*</exclude>
<exclude>**/dependency-reduced-pom.xml</exclude>
<exclude>**/test/resources/*.data</exclude>
<exclude>**/test/resources/*.commit</exclude>
<exclude>**/test/resources/**/*.txt</exclude>
<exclude>**/test/resources/**/*.avsc</exclude>
<exclude>**/target/**</exclude>
<exclude>**/generated-sources/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/banner.txt</exclude>
<!-- local files not in version control -->
<exclude>**/*.iml</exclude>
<exclude>.mvn/**</exclude>
</excludes>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${avro.version}</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>schema</goal>
</goals>
<configuration>
<sourceDirectory>${project.basedir}/src/main/avro/</sourceDirectory>
<outputDirectory>${project.build.directory}/generated-sources/src/main/java/
</outputDirectory>
<!-- generate java.lang.String fields instead of avro Utf8 -->
<stringType>String</stringType>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
<!-- NOTE(review): version hardcoded; consider a version property for consistency -->
<version>1.0.0</version>
<configuration>
<verbose>false</verbose>
<failOnViolation>true</failOnViolation>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<failOnWarning>false</failOnWarning>
<sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
<testSourceDirectory>${project.basedir}/src/test/scala</testSourceDirectory>
<configLocation>${main.basedir}/style/scalastyle.xml</configLocation>
<outputEncoding>UTF-8</outputEncoding>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.github.os72</groupId>
<artifactId>protoc-jar-maven-plugin</artifactId>
<version>3.11.4</version>
<executions>
<execution>
<id>proto-compile</id>
<phase>generate-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<inputDirectories>
<include>src/main/resources</include>
</inputDirectories>
</configuration>
</execution>
<execution>
<id>proto-test-compile</id>
<phase>generate-test-sources</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<addSources>test</addSources>
<inputDirectories>
<include>src/test/resources</include>
</inputDirectories>
</configuration>
</execution>
</executions>
<configuration>
<protocArtifact>com.google.protobuf:protoc:${proto.version}</protocArtifact>
<!-- NOTE(review): protocVersion uses a different property (${protoc.version}) than
protocArtifact (${proto.version}) - confirm the two are intended to stay in sync -->
<protocVersion>${protoc.version}</protocVersion>
<includeStdTypes>true</includeStdTypes>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<dependencyManagement>
<dependencies>
<!-- Scala -->
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-collection-compat_${scala.binary.version}</artifactId>
<version>${scala.collection-compat.version}</version>
</dependency>
<dependency>
<groupId>org.openjdk.jol</groupId>
<artifactId>jol-core</artifactId>
<version>${openjdk.jol.version}</version>
</dependency>
<!-- Logging -->
<!-- NOTE: All the following deps have to have "provided" scope to make sure these are not conflicting
w/ implementations that are using Hudi as a library. For ex, all Spark < 3.3 are still relying on Log4j1
and therefore if we be bringing Log4j2 bridge for V1 on the classpath (log4j-1.2-api), it'll fail w/
`ClassNotFoundException`, since the bridge would be expecting Log4j2 impl be present -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<!-- Fasterxml -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<!-- databind is versioned separately from the other jackson artifacts -->
<version>${fasterxml.jackson.databind.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-guava</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<!-- This one is necessary to support Java 8 Date/Time types (required for Jackson >= 2.13) -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${fasterxml.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
<version>${fasterxml.jackson.module.scala.version}</version>
</dependency>
<!-- Provides performance improvements with json serialization/deserialization -->
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-afterburner</artifactId>
<!-- FIX: this artifact was previously declared twice (once with ${fasterxml.version},
once with ${fasterxml.jackson.databind.version}), which triggers a Maven duplicate
dependencyManagement warning and leaves only the last declaration effective.
Keep a single entry pinned to the databind version, matching the previously
effective resolution (afterburner instruments jackson-databind). -->
<version>${fasterxml.jackson.databind.version}</version>
</dependency>
<!-- Glassfish -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-server</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.connectors</groupId>
<artifactId>jersey-apache-connector</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<version>${glassfish.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
<version>${glassfish.el.version}</version>
<scope>provided</scope>
</dependency>
<!-- Avro -->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${avro.version}</version>
<exclusions>
<!-- snappy is managed explicitly below rather than via avro's transitive version -->
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
<scope>provided</scope>
</dependency>
<!-- airlift -->
<dependency>
<groupId>io.airlift</groupId>
<artifactId>aircompressor</artifactId>
<version>${airlift.version}</version>
</dependency>
<!-- Snappy -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>${snappy.version}</version>
</dependency>
<!-- caffeine -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>${caffeine.version}</version>
</dependency>
<!-- Parquet -->
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>${parquet.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Orc -->
<dependency>
<groupId>org.apache.orc</groupId>
<artifactId>orc-core</artifactId>
<version>${orc.spark.version}</version>
<scope>compile</scope>
</dependency>
<!-- RoaringBitmap -->
<dependency>
<groupId>org.roaringbitmap</groupId>
<artifactId>RoaringBitmap</artifactId>
<version>${roaringbitmap.version}</version>
</dependency>
<!-- Spark -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- "tests" classifier artifacts below provide spark test utilities/fixtures -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<classifier>tests</classifier>
<version>${spark.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Flink -->
<!-- artifactIds are property-driven so scala-suffixed/unsuffixed variants can be swapped per flink version -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.streaming.java.artifactId}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.clients.artifactId}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>${flink.connector.kafka.artifactId}</artifactId>
<version>${flink.connector.kafka.version}</version>
<scope>provided</scope>
</dependency>
<!-- Dropwizard Metrics -->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-graphite</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-jmx</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_httpserver</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_dropwizard</artifactId>
<version>${prometheus.version}</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_pushgateway</artifactId>
<version>${prometheus.version}</version>
</dependency>
<!-- NOTE(review): the next four versions are hardcoded while most managed deps use
properties - consider extracting properties for consistency -->
<dependency>
<groupId>com.beust</groupId>
<artifactId>jcommander</artifactId>
<version>1.78</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${joda.version}</version>
</dependency>
<!-- NOTE(review): xercesImpl 2.9.1 (2007-era) has known security advisories -
confirm whether a newer release can be used -->
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.9.1</version>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
<version>2.7.3</version>
</dependency>
<dependency>
<groupId>org.rocksdb</groupId>
<artifactId>rocksdbjni</artifactId>
<version>${rocksdbjni.version}</version>
</dependency>
<!-- Httpcomponents -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>fluent-hc</artifactId>
<version>${http.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${http.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${http.version}</version>
</dependency>
<!-- Hadoop -->
<!-- hadoop is "provided" by the runtime environment; log4j1/slf4j bindings are
excluded throughout so the log4j2 setup above is authoritative -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- NOTE(review): unlike hadoop-hdfs:tests above, this "tests" artifact declares no
scope (defaults to compile where inherited) - confirm whether "test" was intended -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<classifier>tests</classifier>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Hive -->
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-service</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<!-- FIX: removed the explicit org.apache.hbase:hbase-common exclusion that used to
precede this wildcard - org.apache.hbase:* already excludes every hbase artifact,
so the narrower entry was redundant. -->
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-shims</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-serde</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
</exclusion>
<exclusion>
<groupId>javax.transaction</groupId>
<artifactId>transaction-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<classifier>${hive.exec.classifier}</classifier>
<exclusions>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- NOTE(review): a second hive-exec entry follows, hardcoding groupId
org.apache.hive and carrying no classifier (the one above uses ${hive.groupid} +
${hive.exec.classifier}). They are distinct coordinates only if the classifier/groupId
properties differ - confirm both entries are intentional. -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.aggregate</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>org.pentaho</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${presto.version}</version>
</dependency>
<dependency>
<groupId>io.trino</groupId>
<artifactId>trino-jdbc</artifactId>
<version>${trino.version}</version>
</dependency>
<!-- Zookeeper -->
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${zk-curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- NOTE(review): curator-client/curator-recipes lack the slf4j/log4j exclusions that
curator-framework has above - confirm whether the asymmetry is deliberate -->
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${zk-curator.version}</version>
</dependency>
<!-- Protobuf -->
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${proto.version}</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
<version>${proto.version}</version>
</dependency>
<!-- Junit 5 -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<!-- vintage engine keeps legacy JUnit 4 tests runnable on the JUnit 5 platform -->
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit.vintage.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-junit-jupiter</artifactId>
<scope>test</scope>
<version>${mockito.jupiter.version}</version>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-runner</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-suite-api</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
<version>${junit.platform.version}</version>
<scope>test</scope>
</dependency>
<!-- Kryo -->
<dependency>
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo-shaded</artifactId>
<version>${kryo.shaded.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<!--Used to test execution in task executor after de-serializing-->
<groupId>com.esotericsoftware</groupId>
<artifactId>kryo</artifactId>
<!-- NOTE(review): version hardcoded (unlike kryo-shaded above) - consider a property -->
<version>4.0.0</version>
<scope>test</scope>
</dependency>
<!-- Other Utils -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-test-utils_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.junit.jupiter</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Spring Boot -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${springboot.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.shell</groupId>
<artifactId>spring-shell-starter</artifactId>
<version>${spring.shell.version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
<!-- Additional remote repositories consulted during dependency resolution.
     All are releases-only: snapshot lookups are disabled everywhere. -->
<repositories>
<repository>
<!-- FIX: repository ids must not contain spaces. The id is used as a local
     path segment and as the match key for mirrors/servers in settings.xml,
     and Maven warns about ids with illegal characters. -->
<id>maven-central</id>
<name>Maven Repository</name>
<url>https://repo.maven.apache.org/maven2</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>cloudera-repo-releases</id>
<url>https://repository.cloudera.com/artifactory/public/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
<!-- Explicit policy for consistency with the repositories above: without it,
     snapshots would default to enabled for this repository. -->
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
<profiles>
<profile>
<!-- Activated by -DdeployArtifacts=true: attaches source and javadoc jars and
     GPG-signs all artifacts so they can be published to a release repository. -->
<id>release</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<!-- NOTE(review): version is hard-coded while most plugins in this POM use a
     version property; consider aligning for consistency. -->
<version>2.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<!-- jar-no-fork avoids forking the lifecycle a second time. -->
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<!-- Lint disabled so incomplete javadoc does not fail the release build. -->
<doclint>none</doclint>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<!-- NOTE(review): 1.4 is a very old maven-gpg-plugin release; verify it still
     works with the GPG installed on the release machines. -->
<version>1.4</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<!-- Activated when the HUDI_QUIETER_LOGGING environment variable is set (any
     value): points surefire runs at a quieter log4j2 configuration file. -->
<id>warn-log</id>
<activation>
<property>
<name>env.HUDI_QUIETER_LOGGING</name>
</property>
</activation>
<properties>
<surefire-log4j.file>log4j2-surefire-quiet.properties</surefire-log4j.file>
</properties>
</profile>
<profile>
<!-- Runs only unit tests: functional and integration tests are skipped via the
     skipUTs/skipFTs/skipITs flags, tests tagged "functional" and the
     IT*/testsuite classes are excluded from surefire. -->
<id>unit-tests</id>
<properties>
<skipUTs>false</skipUTs>
<skipFTs>true</skipFTs>
<skipITs>true</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<!-- combine.self="append" merges this configuration with any surefire
     configuration inherited from the base build section. -->
<configuration combine.self="append">
<skip>${skipUTs}</skip>
<forkedProcessExitTimeoutInSeconds>120</forkedProcessExitTimeoutInSeconds>
<excludedGroups>functional</excludedGroups>
<excludes>
<exclude>**/*FunctionalTestSuite.java</exclude>
<exclude>**/IT*.java</exclude>
<exclude>**/testsuite/**/Test*.java</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<!-- Coverage: instrument via prepare-agent, then write the unit-test report
     to .../jacoco-ut during the test phase. -->
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>post-unit-tests</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
<configuration>
<outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<!-- Runs only the functional test suites (*FunctionalTestSuite.java); unit and
     integration tests are skipped. -->
<id>functional-tests</id>
<properties>
<skipUTs>true</skipUTs>
<skipFTs>false</skipFTs>
<skipITs>true</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<!-- surefire-junit47 provider is added explicitly so the JUnit 4 suite
     classes are picked up. -->
<dependencies>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
<artifactId>surefire-junit47</artifactId>
<version>${maven-surefire-plugin.version}</version>
</dependency>
</dependencies>
<configuration combine.self="append">
<skip>${skipFTs}</skip>
<forkCount>1</forkCount>
<reuseForks>true</reuseForks>
<includes>
<include>**/*FunctionalTestSuite.java</include>
</includes>
</configuration>
</plugin>
<plugin>
<!-- Coverage report for functional tests goes to .../jacoco-ft. -->
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>post-functional-tests</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
<configuration>
<outputDirectory>${project.reporting.outputDirectory}/jacoco-ft</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<!-- Adds the platform-service modules to the reactor when deploying artifacts
     (-DdeployArtifacts=true). -->
<id>hudi-platform-service</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<modules>
<module>hudi-platform-service</module>
<module>packaging/hudi-metaserver-server-bundle</module>
</modules>
</profile>
<profile>
<!-- Integration tests: adds the docker/integ-test modules and runs IT*.java
     through failsafe; unit and functional tests are skipped. -->
<id>integration-tests</id>
<activation>
<property>
<name>deployArtifacts</name>
<value>true</value>
</property>
</activation>
<modules>
<module>docker/hoodie/hadoop</module>
<module>hudi-integ-test</module>
<module>packaging/hudi-integ-test-bundle</module>
</modules>
<properties>
<skipUTs>true</skipUTs>
<skipFTs>true</skipFTs>
<!-- ITs follow the global skipTests flag here. -->
<skipITs>${skipTests}</skipITs>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<!-- combine.self="override" replaces (rather than merges with) any inherited
     surefire configuration. -->
<configuration combine.self="override">
<skip>${skipUTs}</skip>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration combine.self="override">
<skip>${skipITs}</skip>
<includes>
<include>**/IT*.java</include>
</includes>
<!-- Endpoint and logging config are passed to the forked IT JVMs. -->
<systemPropertyVariables>
<dynamodb-local.endpoint>${dynamodb-local.endpoint}</dynamodb-local.endpoint>
<log4j.configurationFile>${surefire-log4j.file}</log4j.configurationFile>
</systemPropertyVariables>
<useSystemClassLoader>false</useSystemClassLoader>
</configuration>
<executions>
<execution>
<phase>integration-test</phase>
<goals>
<goal>integration-test</goal>
</goals>
</execution>
<execution>
<id>verify-integration-test</id>
<phase>verify</phase>
<goals>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<!-- Builds aggregated javadocs for the whole project, including javadoc for
     Scala sources generated via the genjavadoc compiler plugin. -->
<id>javadocs</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>${scala-maven-plugin.version}</version>
<executions>
<execution>
<id>doc</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<excludes>
<exclude>${project.basedir}/src/main/scala</exclude>
</excludes>
<checkMultipleScalaVersions>false</checkMultipleScalaVersions>
</configuration>
</execution>
</executions>
<configuration>
<!-- genjavadoc emits Java stubs for Scala sources into target/genjavadoc so
     the javadoc tool can process them. -->
<args>
<arg>-P:genjavadoc:out=${project.build.directory}/genjavadoc</arg>
</args>
<compilerPlugins>
<compilerPlugin>
<groupId>com.typesafe.genjavadoc</groupId>
<!-- genjavadoc is published per full Scala version, hence scala.version
     rather than scala.binary.version. -->
<artifactId>genjavadoc-plugin_${scala.version}</artifactId>
<version>${genjavadoc-plugin.version}</version>
</compilerPlugin>
</compilerPlugins>
<excludes>
<exclude>**/*.scala</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<!-- Registers the generated Java stubs as an additional source root. -->
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>${build-helper-maven-plugin.version}</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/genjavadoc</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>aggregate</id>
<goals>
<goal>aggregate</goal>
</goals>
</execution>
</executions>
<configuration>
<!-- Turn off the javadoc doclint for now due to incomplete javadoc in the source
<doclint>all,-missing</doclint>
-->
<doclint>none</doclint>
<!-- Cross-link external API docs for dependencies referenced in signatures. -->
<detectLinks>true</detectLinks>
<links>
<link>https://avro.apache.org/docs/${avro.version}/api/java</link>
<link>https://docs.spring.io/spring-shell/docs/1.2.0.RELEASE</link>
<link>https://fasterxml.github.io/jackson-databind/javadoc/2.6</link>
<link>https://hadoop.apache.org/docs/r${hadoop.version}/api</link>
<link>https://hbase.apache.org/2.4/apidocs</link>
<link>https://hive.apache.org/javadocs/r2.3.6/api</link>
<link>https://javadoc.io/static/io.javalin/javalin/2.3.0</link>
<link>https://javadoc.io/doc/org.apache.parquet/parquet-avro/${parquet.version}</link>
<link>https://javadoc.io/static/org.apache.parquet/parquet-hadoop/${parquet.version}</link>
<link>https://logging.apache.org/log4j/1.2/apidocs</link>
<link>https://metrics.dropwizard.io/4.1.0/apidocs</link>
<link>https://spark.apache.org/docs/${spark.version}/api/java</link>
</links>
<sourceFileExcludes>
<!--
Exclude the generated java files with the static reference to
the singleton instance of the Scala object, to avoid redundancy in javadoc
-->
<exclude>**/*$.java</exclude>
</sourceFileExcludes>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<!-- Selects Scala 2.11 builds (-Dscala-2.11): switches the scala version
     properties and the Scala-2.11 pulsar-spark artifact. -->
<id>scala-2.11</id>
<properties>
<scala.version>${scala11.version}</scala.version>
<scala.binary.version>2.11</scala.binary.version>
<pulsar.spark.version>${pulsar.spark.scala11.version}</pulsar.spark.version>
</properties>
<activation>
<property>
<name>scala-2.11</name>
</property>
</activation>
</profile>
<profile>
<!-- Selects Scala 2.12 builds (-Dscala-2.12) and enforces that no *_2.11
     artifacts leak onto the classpath. -->
<id>scala-2.12</id>
<properties>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
</properties>
<activation>
<property>
<name>scala-2.12</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>${maven-enforcer-plugin.version}</version>
<executions>
<execution>
<id>enforce-versions</id>
<goals>
<goal>enforce</goal>
</goals>
<configuration>
<rules>
<bannedDependencies>
<!-- Appended to any banned-dependency list inherited from the base build. -->
<excludes combine.children="append">
<exclude>*:*_2.11</exclude>
</excludes>
</bannedDependencies>
</rules>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- "spark2" is an alias of "spark2.4" -->
<!-- NOTE: This profile is deprecated and soon will be removed -->
<profile>
<id>spark2</id>
<modules>
<module>hudi-spark-datasource/hudi-spark2</module>
<module>hudi-spark-datasource/hudi-spark2-common</module>
</modules>
<properties>
<spark.version>${spark2.version}</spark.version>
<sparkbundle.version/>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark2</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark2-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>2.0.0</kafka.version>
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.6.0</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.7</antlr.version>
<fasterxml.version>2.6.7</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>2.6.7.1</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>2.7.4</fasterxml.jackson.dataformat.yaml.version>
<skip.hudi-spark3.unit.tests>true</skip.hudi-spark3.unit.tests>
<!-- NOTE(review): this alias sets skipITs=true while "spark2.4" below sets
     skipITs=false; verify the divergence is intentional for an alias. -->
<skipITs>true</skipITs>
</properties>
<activation>
<property>
<name>spark2</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 2.4 build: pins Spark-2-compatible versions of kafka, parquet, orc,
     avro, antlr and jackson, and selects the hudi-spark2 modules. -->
<id>spark2.4</id>
<modules>
<module>hudi-spark-datasource/hudi-spark2</module>
<module>hudi-spark-datasource/hudi-spark2-common</module>
</modules>
<properties>
<spark.version>${spark2.version}</spark.version>
<sparkbundle.version>2.4</sparkbundle.version>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark2</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark2-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>2.0.0</kafka.version>
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.6.0</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.7</antlr.version>
<fasterxml.version>2.6.7</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>2.6.7.1</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>2.7.4</fasterxml.jackson.dataformat.yaml.version>
<skip.hudi-spark3.unit.tests>true</skip.hudi-spark3.unit.tests>
<skipITs>false</skipITs>
</properties>
<activation>
<property>
<name>spark2.4</name>
</property>
</activation>
</profile>
<profile>
<!-- Auto-activated on Apple Silicon (mac/aarch64): bumps the Spark 2 version
     to 2.4.8. -->
<id>m1-mac</id>
<properties>
<spark2.version>2.4.8</spark2.version>
</properties>
<activation>
<os>
<family>mac</family>
<arch>aarch64</arch>
</os>
</activation>
</profile>
<!-- "spark3" is an alias for "spark3.5" (it pins ${spark35.version} and the hudi-spark3.5.x module) -->
<!-- NOTE: This profile is deprecated and soon will be removed -->
<profile>
<!-- Deprecated alias profile: mirrors the spark3.5 settings (spark35.version,
     Scala 2.12.18, hudi-spark3.5.x modules). -->
<id>spark3</id>
<properties>
<spark3.version>${spark35.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3</sparkbundle.version>
<scala12.version>2.12.18</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.5.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<hive.storage.version>2.8.1</hive.storage.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.13.1</parquet.version>
<orc.spark.version>1.9.1</orc.spark.version>
<avro.version>1.11.2</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.15.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<!-- NOTE(review): the line break before the closing tag below may leave
     trailing whitespace in the property value with some Maven versions;
     consider keeping value and tags on one line. -->
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}
</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.20.0</log4j2.version>
<slf4j.version>2.0.7</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.5.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-storage-api</artifactId>
<version>${hive.storage.version}</version>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.0 build: Scala 2.12 plus Spark-3.0-compatible parquet/orc/avro/
     antlr/jackson versions and the hudi-spark3.0.x modules. -->
<id>spark3.0</id>
<properties>
<spark3.version>${spark30.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.0</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark_pre31.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.0.x</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.5.13</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.8-1</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.0.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
</modules>
<activation>
<property>
<name>spark3.0</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.1 build: same shape as spark3.0 but with the spark31 version and
     the Spark-3 scalatest line. -->
<id>spark3.1</id>
<properties>
<spark3.version>${spark31.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.1</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.1.x</hudi.spark.module>
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2/>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.10.1</parquet.version>
<orc.spark.version>1.5.13</orc.spark.version>
<avro.version>1.8.2</avro.version>
<antlr.version>4.8-1</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.1.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
</modules>
<activation>
<property>
<name>spark3.1</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.2 build. This is the default profile (activeByDefault below):
     with no -Dspark* flag, the build compiles against Spark 3.2. -->
<id>spark3.2</id>
<properties>
<spark3.version>${spark32.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.2</sparkbundle.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.2.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.12.2</parquet.version>
<orc.spark.version>1.6.12</orc.spark.version>
<avro.version>1.10.2</avro.version>
<antlr.version>4.8</antlr.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.2.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<activation>
<activeByDefault>true</activeByDefault>
<property>
<name>spark3.2</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.3 build: overrides scala12.version to 2.12.15 and pins
     Spark-3.3-compatible file-format and jackson versions. -->
<id>spark3.3</id>
<properties>
<spark3.version>${spark33.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.3</sparkbundle.version>
<scala12.version>2.12.15</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<hudi.spark.module>hudi-spark3.3.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.12.2</parquet.version>
<orc.spark.version>1.7.8</orc.spark.version>
<avro.version>1.11.1</avro.version>
<antlr.version>4.8</antlr.version>
<fasterxml.spark3.version>2.13.3</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.3.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<activation>
<property>
<name>spark3.3</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.4 build: Scala 2.12.17, Spark-3.4-compatible file-format and
     jackson versions, newer log4j2/slf4j line, plus a test-scoped
     slf4j-log4j12 bridge. -->
<id>spark3.4</id>
<properties>
<spark3.version>${spark34.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.4</sparkbundle.version>
<scala12.version>2.12.17</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.4.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.12.3</parquet.version>
<orc.spark.version>1.8.3</orc.spark.version>
<avro.version>1.11.1</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.14.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.19.0</log4j2.version>
<slf4j.version>2.0.6</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.4.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3.4</name>
</property>
</activation>
</profile>
<profile>
<!-- Spark 3.5 build: Scala 2.12.18, Spark-3.5-compatible file-format and
     jackson versions, and an explicit hive-storage-api dependency in addition
     to the test-scoped slf4j-log4j12 bridge. -->
<id>spark3.5</id>
<properties>
<spark3.version>${spark35.version}</spark3.version>
<spark.version>${spark3.version}</spark.version>
<sparkbundle.version>3.5</sparkbundle.version>
<scala12.version>2.12.18</scala12.version>
<scala.version>${scala12.version}</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<hudi.spark.module>hudi-spark3.5.x</hudi.spark.module>
<!-- This glob has to include hudi-spark3-common, hudi-spark3.2plus-common -->
<hudi.spark.common.modules.1>hudi-spark3-common</hudi.spark.common.modules.1>
<hudi.spark.common.modules.2>hudi-spark3.2plus-common</hudi.spark.common.modules.2>
<scalatest.version>${scalatest.spark3.version}</scalatest.version>
<kafka.version>${kafka.spark3.version}</kafka.version>
<hive.storage.version>2.8.1</hive.storage.version>
<!-- NOTE: Some Hudi modules require standalone Parquet/Orc/etc file-format dependency (hudi-hive-sync,
hudi-hadoop-mr, for ex). Since these Hudi modules might be used from w/in the execution engine(s)
bringing these file-formats as dependencies as well, we need to make sure that versions are
synchronized to avoid classpath ambiguity -->
<parquet.version>1.13.1</parquet.version>
<orc.spark.version>1.9.1</orc.spark.version>
<avro.version>1.11.2</avro.version>
<antlr.version>4.9.3</antlr.version>
<fasterxml.spark3.version>2.15.2</fasterxml.spark3.version>
<fasterxml.version>${fasterxml.spark3.version}</fasterxml.version>
<fasterxml.jackson.databind.version>${fasterxml.spark3.version}</fasterxml.jackson.databind.version>
<fasterxml.jackson.module.scala.version>${fasterxml.spark3.version}</fasterxml.jackson.module.scala.version>
<fasterxml.jackson.dataformat.yaml.version>${fasterxml.spark3.version}</fasterxml.jackson.dataformat.yaml.version>
<pulsar.spark.version>${pulsar.spark.scala12.version}</pulsar.spark.version>
<log4j2.version>2.20.0</log4j2.version>
<slf4j.version>2.0.7</slf4j.version>
<skip.hudi-spark2.unit.tests>true</skip.hudi-spark2.unit.tests>
<skipITs>true</skipITs>
</properties>
<modules>
<module>hudi-spark-datasource/hudi-spark3.5.x</module>
<module>hudi-spark-datasource/hudi-spark3-common</module>
<module>hudi-spark-datasource/hudi-spark3.2plus-common</module>
</modules>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>${hive.groupid}</groupId>
<artifactId>hive-storage-api</artifactId>
<version>${hive.storage.version}</version>
</dependency>
</dependencies>
<activation>
<property>
<name>spark3.5</name>
</property>
</activation>
</profile>
<profile>
<!-- Flink 1.18 build. NOTE(review): unlike the other flink profiles this one
     does not override flink.version / hudi.flink.module / flink.bundle.version,
     presumably because the 1.18 values are the defaults in the global
     properties section; verify against the top of the POM. -->
<id>flink1.18</id>
<properties>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.13.1</flink.format.parquet.version>
</properties>
<activation>
<property>
<name>flink1.18</name>
</property>
</activation>
</profile>
<profile>
<!-- Flink 1.17 build: selects the hudi-flink1.17.x adapter module and matching
     bundle/connector versions. -->
<id>flink1.17</id>
<properties>
<flink.version>${flink1.17.version}</flink.version>
<hudi.flink.module>hudi-flink1.17.x</hudi.flink.module>
<flink.bundle.version>1.17</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.3</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.17.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.17</name>
</property>
</activation>
</profile>
<profile>
<!-- Flink 1.16 build. -->
<id>flink1.16</id>
<properties>
<flink.version>${flink1.16.version}</flink.version>
<hudi.flink.module>hudi-flink1.16.x</hudi.flink.module>
<flink.bundle.version>1.16</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.16.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.16</name>
</property>
</activation>
</profile>
<profile>
<!-- Flink 1.15 build. -->
<id>flink1.15</id>
<properties>
<flink.version>${flink1.15.version}</flink.version>
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
<flink.bundle.version>1.15</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.15.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.15</name>
</property>
</activation>
</profile>
<profile>
<!-- Flink 1.14 build. Flink artifacts were still published with a Scala
     binary-version suffix in 1.14, so the artifactId properties here carry
     the _${scala.binary.version} suffix that later profiles drop. -->
<id>flink1.14</id>
<properties>
<flink.version>${flink1.14.version}</flink.version>
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
<flink.bundle.version>1.14</flink.bundle.version>
<orc.flink.version>1.5.6</orc.flink.version>
<flink.avro.version>1.11.1</flink.avro.version>
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet_${scala.binary.version}</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb_${scala.binary.version}</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils_${scala.binary.version}</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java_${scala.binary.version}</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
<flink.format.parquet.version>1.11.1</flink.format.parquet.version>
<flink.connector.kafka.version>${flink1.14.version}</flink.connector.kafka.version>
</properties>
<activation>
<property>
<name>flink1.14</name>
</property>
</activation>
</profile>
<!-- Opt-out switch: sets the shadeSources flag to false so the build skips
     producing shaded source artifacts (flag is consumed elsewhere in this
     pom, outside this view — presumably by the shade-plugin configuration;
     TODO confirm). Activated by passing -DskipShadeSources. -->
<profile>
<id>skipShadeSources</id>
<properties>
<shadeSources>false</shadeSources>
</properties>
<activation>
<property>
<name>skipShadeSources</name>
</property>
</activation>
</profile>
<!-- JDK 17 support: overrides argLine with the --add-opens flags needed to
     open JDK-internal packages that the strong-encapsulation default of
     Java 17 would otherwise block, plus -Djol.magicFieldOffset=true for the
     JOL library. NOTE(review): argLine is conventionally consumed by the
     surefire/failsafe test plugins — confirm in the build section of this
     pom (outside this view). Activated by passing -Djava17. -->
<profile>
<id>java17</id>
<properties>
<argLine>-Xmx2g --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djol.magicFieldOffset=true</argLine>
</properties>
<activation>
<property>
<name>java17</name>
</property>
</activation>
</profile>
</profiles>
</project>
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
Java
1
https://gitee.com/quantongsui/Hudi.git
git@gitee.com:quantongsui/Hudi.git
quantongsui
Hudi
Hudi
master

搜索帮助