Skip to content

Commit

Permalink
[HUDI-4644] Change default flink profile to 1.15.x (apache#6445)
Browse files Browse the repository at this point in the history
  • Loading branch information
danny0405 authored and fengjian committed Apr 5, 2023
1 parent 9a01c14 commit 916fa60
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 41 deletions.
7 changes: 0 additions & 7 deletions hudi-examples/hudi-examples-flink/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -333,13 +333,6 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
Expand Down
5 changes: 0 additions & 5 deletions hudi-flink-datasource/hudi-flink/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -239,11 +239,6 @@
<artifactId>jcommander</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>bijection-avro_${scala.binary.version}</artifactId>
<version>0.9.7</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,6 @@ private void testWriteToHoodie(
execEnv.addOperator(pipeline.getTransformation());

if (isMor) {
Pipelines.clean(conf, pipeline);
Pipelines.compact(conf, pipeline);
}

Expand Down
39 changes: 11 additions & 28 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -131,20 +131,20 @@
<flink1.15.version>1.15.1</flink1.15.version>
<flink1.14.version>1.14.5</flink1.14.version>
<flink1.13.version>1.13.6</flink1.13.version>
<flink.version>${flink1.13.version}</flink.version>
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
<flink.bundle.version>1.13</flink.bundle.version>
<flink.version>${flink1.15.version}</flink.version>
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
<flink.bundle.version>1.15</flink.bundle.version>
<flink.format.parquet.version>1.12.2</flink.format.parquet.version>
<flink.runtime.artifactId>flink-runtime</flink.runtime.artifactId>
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_2.12</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
<spark31.version>3.1.3</spark31.version>
<spark32.version>3.2.1</spark32.version>
<spark33.version>3.3.0</spark33.version>
Expand Down Expand Up @@ -1828,20 +1828,6 @@

<profile>
<id>flink1.15</id>
<properties>
<flink.version>${flink1.15.version}</flink.version>
<flink.table.runtime.artifactId>flink-table-runtime</flink.table.runtime.artifactId>
<flink.parquet.artifactId>flink-parquet</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka</flink.connector.kafka.artifactId>
<!-- 1.15 only supports scala2.12 -->
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_2.12</flink.hadoop.compatibility.artifactId>
<hudi.flink.module>hudi-flink1.15.x</hudi.flink.module>
<flink.bundle.version>1.15</flink.bundle.version>
</properties>
<activation>
<property>
<name>flink1.15</name>
Expand All @@ -1852,31 +1838,30 @@
<id>flink1.14</id>
<properties>
<flink.version>${flink1.14.version}</flink.version>
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
<flink.bundle.version>1.14</flink.bundle.version>
<flink.table.runtime.artifactId>flink-table-runtime_${scala.binary.version}</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner_${scala.binary.version}</flink.table.planner.artifactId>
<flink.parquet.artifactId>flink-parquet_${scala.binary.version}</flink.parquet.artifactId>
<flink.statebackend.rocksdb.artifactId>flink-statebackend-rocksdb_${scala.binary.version}</flink.statebackend.rocksdb.artifactId>
<flink.test.utils.artifactId>flink-test-utils_${scala.binary.version}</flink.test.utils.artifactId>
<flink.streaming.java.artifactId>flink-streaming-java_${scala.binary.version}</flink.streaming.java.artifactId>
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
<hudi.flink.module>hudi-flink1.14.x</hudi.flink.module>
<flink.bundle.version>1.14</flink.bundle.version>
</properties>
<activation>
<activeByDefault>true</activeByDefault>
<property>
<name>flink1.14</name>
<!-- add flink1.14 module to all profile -->
<value>!disabled</value>
</property>
</activation>
</profile>
<profile>
<id>flink1.13</id>
<properties>
<flink.scala.binary.version>2.11</flink.scala.binary.version>
<flink.version>${flink1.13.version}</flink.version>
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
<flink.bundle.version>1.13</flink.bundle.version>
<flink.runtime.artifactId>flink-runtime_${scala.binary.version}</flink.runtime.artifactId>
<flink.table.runtime.artifactId>flink-table-runtime-blink_${scala.binary.version}</flink.table.runtime.artifactId>
<flink.table.planner.artifactId>flink-table-planner-blink_${scala.binary.version}</flink.table.planner.artifactId>
Expand All @@ -1887,8 +1872,6 @@
<flink.clients.artifactId>flink-clients_${scala.binary.version}</flink.clients.artifactId>
<flink.connector.kafka.artifactId>flink-connector-kafka_${scala.binary.version}</flink.connector.kafka.artifactId>
<flink.hadoop.compatibility.artifactId>flink-hadoop-compatibility_${scala.binary.version}</flink.hadoop.compatibility.artifactId>
<hudi.flink.module>hudi-flink1.13.x</hudi.flink.module>
<flink.bundle.version>1.13</flink.bundle.version>
<skipITs>true</skipITs>
</properties>
<activation>
Expand Down

0 comments on commit 916fa60

Please sign in to comment.