From d9039c4afd9648b5c737cb53a683bb1ad0cb2d61 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Mon, 5 Jun 2023 23:12:37 +0800
Subject: [PATCH 01/24] [FLINK-32068] Connector JDBC: support ClickHouse dialect
---
flink-connector-jdbc/pom.xml | 15 +++
.../clickhouse/dialect/ClickHouseDialect.java | 86 ++++++++++++++
.../dialect/ClickHouseDialectFactory.java | 19 +++
.../dialect/ClickHouseRowConvert.java | 86 ++++++++++++++
.../converter/ClickhouseRowConvert.java | 19 +++
....connector.jdbc.dialect.JdbcDialectFactory | 1 +
.../clickhouse/ClickHouseTestBase.java | 17 +++
.../dialect/ClickHouseDialectTypeTest.java | 41 +++++++
.../table/ClickHouseTableSourceITCase.java | 78 ++++++++++++
.../clickhouse/table/ClickhouseTableRow.java | 34 ++++++
.../table/ClickhouseTableSinkITCase.java | 95 +++++++++++++++
.../table/UnsignedTypeConversionITCase.java | 111 ++++++++++++++++++
.../table/JdbcDynamicTableSinkITCase.java | 6 +
.../jdbc/testutils/DatabaseMetadata.java | 4 +
.../clickhouse/ClickHouseDatabase.java | 55 +++++++++
.../clickhouse/ClickHouseImages.java | 11 ++
.../clickhouse/ClickHouseMetadata.java | 88 ++++++++++++++
.../jdbc/testutils/tables/TableBase.java | 6 +-
.../jdbc/testutils/tables/TableBuilder.java | 5 +
19 files changed, 776 insertions(+), 1 deletion(-)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseTestBase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectTypeTest.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableRow.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableSinkITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/UnsignedTypeConversionITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseImages.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseMetadata.java
diff --git a/flink-connector-jdbc/pom.xml b/flink-connector-jdbc/pom.xml
index 0a298e8b2..e6c08d61f 100644
--- a/flink-connector-jdbc/pom.xml
+++ b/flink-connector-jdbc/pom.xml
@@ -206,6 +206,21 @@ under the License.
test
+
+		<dependency>
+			<groupId>com.clickhouse</groupId>
+			<artifactId>clickhouse-jdbc</artifactId>
+			<version>0.4.6</version>
+			<scope>provided</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>org.testcontainers</groupId>
+			<artifactId>clickhouse</artifactId>
+			<scope>test</scope>
+		</dependency>
+
+
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
new file mode 100644
index 000000000..620e89352
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
@@ -0,0 +1,86 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.connector.jdbc.databases.clickhouse.dialect;

import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
import org.apache.flink.table.types.logical.LogicalTypeRoot;
import org.apache.flink.table.types.logical.RowType;

import java.util.EnumSet;
import java.util.Optional;
import java.util.Set;

/** JDBC dialect for ClickHouse. */
public class ClickHouseDialect extends AbstractDialect {

    private static final long serialVersionUID = 1L;

    // MAX/MIN precision of the ClickHouse DateTime64 type, see:
    // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
    private static final int MAX_TIMESTAMP_PRECISION = 9;
    private static final int MIN_TIMESTAMP_PRECISION = 0;

    // MAX/MIN precision of the ClickHouse Decimal type, see:
    // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
    private static final int MAX_DECIMAL_PRECISION = 76;
    private static final int MIN_DECIMAL_PRECISION = 1;

    @Override
    public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
        return new ClickHouseRowConvert(rowType);
    }

    @Override
    public String getLimitClause(long limit) {
        return "LIMIT " + limit;
    }

    @Override
    public Optional<String> defaultDriverName() {
        return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
    }

    /** ClickHouse quotes identifiers with backticks, like MySQL. */
    @Override
    public String quoteIdentifier(String identifier) {
        return "`" + identifier + "`";
    }

    /**
     * ClickHouse (MergeTree family) has no native UPSERT statement; returning empty makes the
     * connector fall back to plain INSERT.
     */
    @Override
    public Optional<String> getUpsertStatement(
            String tableName, String[] fieldNames, String[] uniqueKeyFields) {
        return Optional.empty();
    }

    @Override
    public String dialectName() {
        return "ClickHouse";
    }

    @Override
    public Optional<Range> timestampPrecisionRange() {
        return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
    }

    @Override
    public Optional<Range> decimalPrecisionRange() {
        return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
    }

    @Override
    public Set<LogicalTypeRoot> supportedTypes() {
        // Kept in sync with ClickHouseRowConvert#createInternalConverter, which handles
        // TIMESTAMP_WITH_LOCAL_TIME_ZONE (the original declared TIMESTAMP_WITH_TIME_ZONE,
        // which the converter never handles).
        return EnumSet.of(
                LogicalTypeRoot.CHAR,
                LogicalTypeRoot.VARCHAR,
                LogicalTypeRoot.BOOLEAN,
                LogicalTypeRoot.DECIMAL,
                LogicalTypeRoot.TINYINT,
                LogicalTypeRoot.SMALLINT,
                LogicalTypeRoot.INTEGER,
                LogicalTypeRoot.BIGINT,
                LogicalTypeRoot.FLOAT,
                LogicalTypeRoot.DOUBLE,
                LogicalTypeRoot.DATE,
                LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
                LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
    }
}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectFactory.java
new file mode 100644
index 000000000..a1eaf304c
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse.dialect;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** clickhouse dialect factory. */
+@Internal
+public class ClickHouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickHouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
new file mode 100644
index 000000000..1fd1d8e58
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
@@ -0,0 +1,86 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.connector.jdbc.databases.clickhouse.dialect;

import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;

import com.clickhouse.data.value.UnsignedByte;
import com.clickhouse.data.value.UnsignedInteger;
import com.clickhouse.data.value.UnsignedShort;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;

/**
 * Runtime converter responsible for converting between JDBC objects and Flink internal objects
 * for the ClickHouse data-type range, see
 * https://clickhouse.com/docs/en/sql-reference/data-types/int-uint .
 *
 * <p>Unsigned ClickHouse types are widened into the next larger signed Flink type (UInt8 ->
 * SMALLINT, UInt16 -> INTEGER, UInt32 -> BIGINT); UInt64 has no lossless Java signed
 * representation and is rejected.
 */
public class ClickHouseRowConvert extends AbstractJdbcRowConverter {
    @Override
    public String converterName() {
        return "ClickHouse";
    }

    public ClickHouseRowConvert(RowType rowType) {
        super(rowType);
    }

    @Override
    protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
        switch (type.getTypeRoot()) {
            case NULL:
                return null;
            case BOOLEAN:
            case FLOAT:
            case DOUBLE:
                // Driver already yields the matching Java primitive wrapper.
                return val -> val;
            case TINYINT:
                return val -> ((Byte) val).byteValue();
            case SMALLINT:
                // Either a signed Int16 or an unsigned UInt8 widened to short.
                return val ->
                        val instanceof UnsignedByte
                                ? ((UnsignedByte) val).shortValue()
                                : ((Short) val).shortValue();
            case INTEGER:
                // Either a signed Int32 or an unsigned UInt16 widened to int.
                return val ->
                        val instanceof UnsignedShort
                                ? ((UnsignedShort) val).intValue()
                                : ((Integer) val).intValue();
            case BIGINT:
                return jdbcField -> {
                    if (jdbcField instanceof UnsignedInteger) {
                        return ((UnsignedInteger) jdbcField).longValue();
                    } else if (jdbcField instanceof Long) {
                        return ((Long) jdbcField).longValue();
                    }
                    // UInt64 is not supported: its range exceeds the signed long range.
                    throw new UnsupportedOperationException("Unsupported type: " + type);
                };
            case DECIMAL:
                final int precision = ((DecimalType) type).getPrecision();
                final int scale = ((DecimalType) type).getScale();
                // Decimal256 values may surface as BigInteger; wrap them with scale 0.
                return val ->
                        val instanceof BigInteger
                                ? DecimalData.fromBigDecimal(
                                        new BigDecimal((BigInteger) val, 0), precision, scale)
                                : DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
            case CHAR:
            case VARCHAR:
                return val -> StringData.fromString((String) val);
            case DATE:
                // Flink stores DATE as days-since-epoch int; toIntExact guards against
                // silent truncation (the ClickHouse Date range always fits).
                return val -> Math.toIntExact(((LocalDate) val).toEpochDay());
            case TIMESTAMP_WITHOUT_TIME_ZONE:
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
            default:
                return super.createInternalConverter(type);
        }
    }
}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
new file mode 100644
index 000000000..f696111b7
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
@@ -0,0 +1,19 @@
package org.apache.flink.connector.jdbc.internal.converter;

import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
import org.apache.flink.table.types.logical.RowType;

/**
 * Plain JDBC row converter identifying itself as "Clickhouse".
 *
 * <p>NOTE(review): this appears to duplicate {@code
 * org.apache.flink.connector.jdbc.databases.clickhouse.dialect.ClickHouseRowConvert}, which is
 * the converter actually returned by {@code ClickHouseDialect#getRowConverter} — confirm whether
 * this class is a leftover that should be removed.
 */
public class ClickhouseRowConvert extends AbstractJdbcRowConverter {

    private static final long serialVersionUID = 1L;

    // Dialect name reported by this converter; inherits all conversions from the base class.
    @Override
    public String converterName() {
        return "Clickhouse";
    }

    public ClickhouseRowConvert(RowType rowType) {
        super(rowType);
    }
}
diff --git a/flink-connector-jdbc/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory b/flink-connector-jdbc/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory
index e5a05b770..ffa20664a 100644
--- a/flink-connector-jdbc/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory
+++ b/flink-connector-jdbc/src/main/resources/META-INF/services/org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory
@@ -19,3 +19,4 @@ org.apache.flink.connector.jdbc.databases.postgres.dialect.PostgresDialectFactor
org.apache.flink.connector.jdbc.databases.oracle.dialect.OracleDialectFactory
org.apache.flink.connector.jdbc.databases.sqlserver.dialect.SqlServerDialectFactory
org.apache.flink.connector.jdbc.databases.cratedb.dialect.CrateDBDialectFactory
+org.apache.flink.connector.jdbc.databases.clickhouse.dialect.ClickHouseDialectFactory
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseTestBase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseTestBase.java
new file mode 100644
index 000000000..612d9bd12
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseTestBase.java
@@ -0,0 +1,17 @@
package org.apache.flink.connector.jdbc.databases.clickhouse;

import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
import org.apache.flink.connector.jdbc.testutils.DatabaseTest;
import org.apache.flink.connector.jdbc.testutils.databases.clickhouse.ClickHouseDatabase;

import org.junit.jupiter.api.extension.ExtendWith;

/**
 * Base interface for tests that run against a containerized ClickHouse database.
 *
 * <p>The {@link ClickHouseDatabase} JUnit 5 extension starts/stops the container around the test
 * lifecycle; implementors obtain connection details through {@link #getMetadata()}.
 */
@ExtendWith(ClickHouseDatabase.class)
public interface ClickHouseTestBase extends DatabaseTest {

    @Override
    default DatabaseMetadata getMetadata() {
        // Delegates to the shared container-backed metadata managed by the extension.
        return ClickHouseDatabase.getMetadata();
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectTypeTest.java
new file mode 100644
index 000000000..8380de232
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialectTypeTest.java
@@ -0,0 +1,41 @@
package org.apache.flink.connector.jdbc.databases.clickhouse.dialect;

import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;

import java.util.Arrays;
import java.util.List;

/** The ClickHouse params for {@link JdbcDialectTypeTest}. */
public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {

    @Override
    protected String testDialect() {
        return "clickhouse";
    }

    @Override
    protected List<TestItem> testData() {
        return Arrays.asList(
                // Types the ClickHouse dialect declares as supported.
                createTestItem("CHAR"),
                createTestItem("VARCHAR"),
                createTestItem("BOOLEAN"),
                createTestItem("TINYINT"),
                createTestItem("SMALLINT"),
                createTestItem("INTEGER"),
                createTestItem("BIGINT"),
                createTestItem("FLOAT"),
                createTestItem("DOUBLE"),
                createTestItem("DECIMAL(10, 4)"),
                createTestItem("DECIMAL(38, 18)"),
                createTestItem("DATE"),
                createTestItem("TIMESTAMP(3)"),
                createTestItem("TIMESTAMP WITHOUT TIME ZONE"),

                // Not valid data: binary types are rejected with an explicit error.
                createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
                createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
                createTestItem(
                        "VARBINARY(10)",
                        "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
new file mode 100644
index 000000000..0dd88ed07
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
@@ -0,0 +1,78 @@
package org.apache.flink.connector.jdbc.databases.clickhouse.table;

import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseTestBase;
import org.apache.flink.connector.jdbc.databases.clickhouse.dialect.ClickHouseDialect;
import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSourceITCase;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.types.Row;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.List;
import java.util.TimeZone;

import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.ckTableRow;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.dbType;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.field;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.pkField;

/** The Table Source ITCase for {@link ClickHouseDialect}. */
class ClickHouseTableSourceITCase extends JdbcDynamicTableSourceITCase
        implements ClickHouseTestBase {

    @Override
    protected ClickhouseTableRow createInputTable() {
        // Covers the full range of ClickHouse column types the dialect supports.
        return ckTableRow(
                "jdbDynamicTableSource",
                pkField("id", dbType("Int64"), DataTypes.BIGINT().notNull()),
                field("user_id_int8", dbType("Int8"), DataTypes.TINYINT().notNull()),
                field("user_id_int16", dbType("Int16"), DataTypes.SMALLINT().notNull()),
                field("user_id_int32", dbType("Int32"), DataTypes.INT().notNull()),
                field("user_id_int64", dbType("Int64"), DataTypes.BIGINT().notNull()),
                field("price_float", dbType("Float32"), DataTypes.FLOAT()),
                field("price_double", dbType("Float64"), DataTypes.DOUBLE()),
                field("decimal_col", dbType("Decimal64(4)"), DataTypes.DECIMAL(10, 4)),
                field("user_date", dbType("Date"), DataTypes.DATE()),
                field("timestamp6_col", dbType("DateTime(6)"), DataTypes.TIMESTAMP(6)),
                field("decimal_column", dbType("Decimal(3,1)"), DataTypes.DECIMAL(3, 1)),
                field("bool_flag", dbType("Bool"), DataTypes.BOOLEAN()),
                field("message", dbType("String"), DataTypes.VARCHAR(100)));
    }

    @Override
    protected List<Row> getTestData() {
        // Pin the default zone so DateTime values round-trip deterministically.
        // (The original used "GTM+0", a typo that only worked because unknown zone
        // IDs silently fall back to GMT.)
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+0"));
        // Two rows exercising the signed min/max boundaries of each numeric type.
        return Arrays.asList(
                Row.of(
                        1L,
                        (byte) 1,
                        (short) -32768,
                        -2147483648,
                        -9223372036854775808L,
                        -3.4e+38f,
                        -1.7e+308d,
                        BigDecimal.valueOf(100.1234),
                        LocalDate.parse("2023-01-01"),
                        LocalDateTime.parse("2020-01-01T15:35:00.123456"),
                        BigDecimal.valueOf(-99.9),
                        true,
                        "this is a test message"),
                Row.of(
                        2L,
                        (byte) 2,
                        (short) 32767,
                        2147483647,
                        9223372036854775807L,
                        3.4e+38f,
                        1.7e+308d,
                        BigDecimal.valueOf(101.1234),
                        LocalDate.parse("2023-01-02"),
                        LocalDateTime.parse("2020-01-01T15:36:01.123456"),
                        BigDecimal.valueOf(99.9),
                        false,
                        "this is a test message"));
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableRow.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableRow.java
new file mode 100644
index 000000000..830d00a55
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableRow.java
@@ -0,0 +1,34 @@
package org.apache.flink.connector.jdbc.databases.clickhouse.table;

import org.apache.flink.connector.jdbc.testutils.tables.TableField;
import org.apache.flink.connector.jdbc.testutils.tables.TableRow;

import java.util.stream.Collectors;

/**
 * {@link TableRow} variant producing ClickHouse-flavored DDL: tables use the MergeTree engine
 * with an explicit PRIMARY KEY clause, and cleanup uses TRUNCATE instead of DELETE.
 */
public class ClickhouseTableRow extends TableRow {

    public ClickhouseTableRow(String name, TableField[] fields) {
        super(name, fields);
    }

    @Override
    public String getCreateQuery() {
        // Column definitions, comma-separated.
        String columnList =
                getStreamFields().map(TableField::asString).collect(Collectors.joining(", "));
        // MergeTree requires a primary key; collect all fields flagged as PK.
        String primaryKeys =
                getStreamFields()
                        .filter(TableField::isPkField)
                        .map(TableField::getName)
                        .collect(Collectors.joining(", "));
        return String.format(
                "CREATE TABLE %s (%s) %s PRIMARY KEY (%s)",
                getTableName(), columnList, "ENGINE = MergeTree", primaryKeys);
    }

    @Override
    protected String getDeleteFromQuery() {
        // ClickHouse DELETE is an async mutation; TRUNCATE clears the table immediately.
        return String.format("truncate table %s", getTableName());
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableSinkITCase.java
new file mode 100644
index 000000000..d28bd389f
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickhouseTableSinkITCase.java
@@ -0,0 +1,95 @@
package org.apache.flink.connector.jdbc.databases.clickhouse.table;

import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseTestBase;
import org.apache.flink.connector.jdbc.databases.clickhouse.dialect.ClickHouseDialect;
import org.apache.flink.connector.jdbc.table.JdbcDynamicTableSinkITCase;
import org.apache.flink.connector.jdbc.testutils.tables.TableRow;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.types.Row;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;
import java.util.TimeZone;

import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.ckTableRow;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.dbType;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.field;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.pkField;

/** The Table Sink ITCase for {@link ClickHouseDialect}. */
class ClickhouseTableSinkITCase extends JdbcDynamicTableSinkITCase implements ClickHouseTestBase {

    @Override
    protected TableRow createUpsertOutputTable() {
        return ckTableRow(
                "dynamicSinkForUpsert",
                pkField("cnt", dbType("Int64"), DataTypes.BIGINT().notNull()),
                field("lencnt", dbType("Int64"), DataTypes.BIGINT().notNull()),
                pkField("cTag", DataTypes.INT().notNull()),
                field("ts", dbType("DateTime"), DataTypes.TIMESTAMP()));
    }

    @Override
    protected TableRow createAppendOutputTable() {
        // Pin the default zone so DateTime64 values round-trip deterministically.
        // (The original used "GTM+0", a typo that only worked because unknown zone
        // IDs silently fall back to GMT.)
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+0"));
        return ckTableRow(
                "dynamicSinkForAppend",
                field("id", DataTypes.INT().notNull()),
                field("num", dbType("Int64"), DataTypes.BIGINT().notNull()),
                field("ts", dbType("DateTime64"), DataTypes.TIMESTAMP()));
    }

    @Override
    protected TableRow createBatchOutputTable() {
        return ckTableRow(
                "dynamicSinkForBatch",
                field("NAME", DataTypes.VARCHAR(20).notNull()),
                field("SCORE", dbType("Int64"), DataTypes.BIGINT().notNull()));
    }

    @Override
    protected TableRow createUserOutputTable() {
        return ckTableRow(
                "USER_TABLE",
                pkField("user_id", DataTypes.VARCHAR(20).notNull()),
                field("user_name", DataTypes.VARCHAR(20).notNull()),
                field("email", DataTypes.VARCHAR(255)),
                field("balance", DataTypes.DECIMAL(18, 2)),
                field("balance2", DataTypes.DECIMAL(18, 2)));
    }

    @Override
    protected TableRow createRealOutputTable() {
        return ckTableRow("REAL_TABLE", field("real_data", dbType("REAL"), DataTypes.FLOAT()));
    }

    @Override
    protected TableRow createCheckpointOutputTable() {
        return ckTableRow(
                "checkpointTable", field("id", dbType("Int64"), DataTypes.BIGINT().notNull()));
    }

    @Override
    protected List<Row> testUserData() {
        return Arrays.asList(
                Row.of(
                        "user1",
                        "Tom",
                        "tom123@gmail.com",
                        new BigDecimal("8.1"),
                        new BigDecimal("16.2")),
                Row.of(
                        "user3",
                        "Bailey",
                        "bailey@qq.com",
                        new BigDecimal("9.99"),
                        new BigDecimal("19.98")),
                Row.of(
                        "user4",
                        "Tina",
                        "tina@gmail.com",
                        new BigDecimal("11.3"),
                        new BigDecimal("22.6")));
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/UnsignedTypeConversionITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/UnsignedTypeConversionITCase.java
new file mode 100644
index 000000000..10ea067e7
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/UnsignedTypeConversionITCase.java
@@ -0,0 +1,111 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.connector.jdbc.databases.clickhouse.table;

import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseTestBase;
import org.apache.flink.connector.jdbc.testutils.TableManaged;
import org.apache.flink.connector.jdbc.testutils.tables.TableRow;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.CollectionUtil;

import org.junit.jupiter.api.Test;

import java.sql.Connection;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static java.lang.String.format;
import static java.lang.String.join;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.ckTableRow;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.dbType;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.field;
import static org.apache.flink.connector.jdbc.testutils.tables.TableBuilder.pkField;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Test unsigned type conversion between Flink and JDBC driver ClickHouse, the test underlying use
 * ClickHouse to mock a DB.
 */
class UnsignedTypeConversionITCase extends AbstractTestBase implements ClickHouseTestBase {

    private static final String TABLE_SOURCE = "jdbc_source";
    private static final String TABLE_SINK = "jdbc_sink";
    private static final String TABLE_DATA = "data";

    // Unsigned ClickHouse columns map to the next larger signed Flink type
    // (UInt8 -> SMALLINT, UInt16 -> INT, UInt32 -> BIGINT).
    private static final TableRow TABLE =
            ckTableRow(
                    "unsigned_test",
                    pkField("id", dbType("Int64"), DataTypes.BIGINT().notNull()),
                    field("small_u", dbType("UInt8"), DataTypes.SMALLINT().notNull()),
                    field("int_u", dbType("UInt16"), DataTypes.INT().notNull()),
                    field("bigint_u", dbType("UInt32"), DataTypes.BIGINT().notNull()));

    public List<TableManaged> getManagedTables() {
        return Collections.singletonList(TABLE);
    }

    // Boundary rows: all-zero minimums and the unsigned maximum of each type.
    private static final List<Row> ROW =
            Arrays.asList(
                    Row.of(1L, (short) 0, 0, 0L), Row.of(2L, (short) 255, 65535, 4294967295L));

    @Test
    void testUnsignedType() throws Exception {
        try (Connection con = getMetadata().getConnection()) {
            StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
            TableEnvironment tableEnv = StreamTableEnvironment.create(sEnv);
            createFlinkTable(tableEnv);
            prepareData(tableEnv);

            // write data to db
            String columns = join(",", TABLE.getTableFields());
            tableEnv.executeSql(
                            format(
                                    "insert into %s select %s from %s",
                                    TABLE_SINK, columns, TABLE_DATA))
                    .await();

            // read data from db using jdbc connection and compare
            List<Row> selectAll = TABLE.selectAllTable(con);
            assertThat(selectAll).isEqualTo(ROW);

            // read data from db using flink and compare
            String sql = format("select %s from %s", columns, TABLE_SOURCE);
            CloseableIterator<Row> collected = tableEnv.executeSql(sql).collect();
            List<Row> result = CollectionUtil.iteratorToList(collected);
            assertThat(result).isEqualTo(ROW);
        }
    }

    /** Registers identical source and sink Flink tables pointing at the ClickHouse table. */
    private void createFlinkTable(TableEnvironment tableEnv) {
        tableEnv.executeSql(TABLE.getCreateQueryForFlink(getMetadata(), TABLE_SOURCE));
        tableEnv.executeSql(TABLE.getCreateQueryForFlink(getMetadata(), TABLE_SINK));
    }

    /** Exposes the in-memory test rows as a temporary view used as the insert source. */
    private void prepareData(TableEnvironment tableEnv) {
        Table dataTable = tableEnv.fromValues(DataTypes.ROW(TABLE.getTableDataFields()), ROW);
        tableEnv.createTemporaryView(TABLE_DATA, dataTable);
    }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/table/JdbcDynamicTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/table/JdbcDynamicTableSinkITCase.java
index 1973a1856..052a76e1c 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/table/JdbcDynamicTableSinkITCase.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/table/JdbcDynamicTableSinkITCase.java
@@ -213,6 +213,9 @@ void testReal() throws Exception {
@Test
void testUpsert() throws Exception {
+ if (!getMetadata().supportUpdate()) {
+ return;
+ }
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.getConfig().enableObjectReuse();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
@@ -322,6 +325,9 @@ void testBatchSink() throws Exception {
@Test
void testReadingFromChangelogSource() throws Exception {
+ if (!getMetadata().supportUpdate()) {
+ return;
+ }
TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
String dataId = TestValuesTableFactory.registerData(TestData.userChangelog());
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/DatabaseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/DatabaseMetadata.java
index 30ba9cc39..363e5f640 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/DatabaseMetadata.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/DatabaseMetadata.java
@@ -44,6 +44,10 @@ public interface DatabaseMetadata extends Serializable {
String getVersion();
    /**
     * Whether the database supports updating existing rows (needed by upsert and changelog
     * tests). Defaults to {@code true}; append-only databases override this to return {@code
     * false} so those tests are skipped.
     */
    default boolean supportUpdate() {
        return true;
    }
+
default SerializableSupplier getXaSourceSupplier() {
return this::buildXaDataSource;
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
new file mode 100644
index 000000000..e4d212cdd
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
+import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
+import org.apache.flink.util.FlinkRuntimeException;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+/** Clickhouse database for testing. */
+public class ClickHouseDatabase extends DatabaseExtension implements ClickHouseImages {
+
+ private static final ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
+
+ private static ClickHouseMetadata metadata;
+
+ public static ClickHouseMetadata getMetadata() {
+ if (!CONTAINER.isRunning()) {
+ throw new FlinkRuntimeException("Container is stopped.");
+ }
+ if (metadata == null) {
+ metadata = new ClickHouseMetadata(CONTAINER, false);
+ }
+ return metadata;
+ }
+
+ @Override
+ protected DatabaseMetadata startDatabase() throws Exception {
+ CONTAINER.start();
+ return getMetadata();
+ }
+
+ @Override
+ protected void stopDatabase() throws Exception {
+ CONTAINER.stop();
+ metadata = null;
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseImages.java
new file mode 100644
index 000000000..64d097243
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseImages.java
@@ -0,0 +1,11 @@
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.testcontainers.utility.DockerImageName;
+
+/** clickhouse images. */
+public interface ClickHouseImages {
+
+ DockerImageName CLICKHOUSE_IMAGE_23 =
+ DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
+ .asCompatibleSubstituteFor("yandex/clickhouse-server");
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseMetadata.java
new file mode 100644
index 000000000..b08625178
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseMetadata.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+import javax.sql.XADataSource;
+
+/** clickhouse Metadata. */
+public class ClickHouseMetadata implements DatabaseMetadata {
+
+ private final String username;
+ private final String password;
+ private final String url;
+ private final String driver;
+ private final String version;
+ private final boolean xaEnabled;
+
+ public ClickHouseMetadata(ClickHouseContainer container) {
+ this(container, false);
+ }
+
+ public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
+ this.username = container.getUsername();
+ this.password = container.getPassword();
+ this.url = container.getJdbcUrl();
+ this.driver = container.getDriverClassName();
+ this.version = container.getDockerImageName();
+ this.xaEnabled = hasXaEnabled;
+ }
+
+ @Override
+ public String getJdbcUrl() {
+ return this.url;
+ }
+
+ @Override
+ public String getJdbcUrlWithCredentials() {
+ return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
+ }
+
+ @Override
+ public String getUsername() {
+ return this.username;
+ }
+
+ @Override
+ public String getPassword() {
+ return this.password;
+ }
+
+ @Override
+ public XADataSource buildXaDataSource() {
+ return null;
+ }
+
+ @Override
+ public String getDriverClass() {
+ return this.driver;
+ }
+
+ @Override
+ public String getVersion() {
+ return this.version;
+ }
+
+ @Override
+ public boolean supportUpdate() {
+ return false;
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBase.java
index d8cbd7937..8599afa13 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBase.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBase.java
@@ -69,7 +69,7 @@ public String getTableName() {
return name;
}
- private Stream getStreamFields() {
+ protected Stream getStreamFields() {
return Arrays.stream(this.fields);
}
@@ -291,4 +291,8 @@ protected T getNullable(ResultSet rs, FunctionWithException T getNullable(ResultSet rs, T value) throws SQLException {
return rs.wasNull() ? null : value;
}
+
+ public TableField[] getFields() {
+ return fields;
+ }
}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBuilder.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBuilder.java
index 6a8f80d3b..18838cfa8 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBuilder.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/tables/TableBuilder.java
@@ -18,6 +18,7 @@
package org.apache.flink.connector.jdbc.testutils.tables;
+import org.apache.flink.connector.jdbc.databases.clickhouse.table.ClickhouseTableRow;
import org.apache.flink.table.types.DataType;
/** Table builder. * */
@@ -51,4 +52,8 @@ private static TableField createField(
String name, TableField.DbType dbType, DataType dataType, boolean pkField) {
return new TableField(name, dataType, dbType, pkField);
}
+
+ public static ClickhouseTableRow ckTableRow(String name, TableField... fields) {
+ return new ClickhouseTableRow(name, fields);
+ }
}
From 21d15e0b39efbb8d4778e82c38ec48a10ea8bece Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 18 May 2023 23:50:10 +0800
Subject: [PATCH 02/24] [FLINK-32068] Support ClickHouse dialect in JDBC connector
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 +++++
.../clickhouse/ClickHouseDialectFactory.java | 19 ++
.../converter/ClickhouseRowConvert.java | 19 --
.../clickhouse/ClickHouseDatabase.java | 25 ++
.../clickhouse/ClickHouseImages.java | 4 +
.../clickhouse/ClickHouseMetadata.java | 66 ++++
.../clickhouse/ClickHouseDialectTypeTest.java | 41 +++
.../ClickHouseTableSourceITCase.java | 180 +++++++++++
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ++++++++++++++++++
9 files changed, 722 insertions(+), 19 deletions(-)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
new file mode 100644
index 000000000..f4c359afe
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
@@ -0,0 +1,87 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** clickhouse dialect */
+public class ClickHouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
+ private static final int MAX_TIMESTAMP_PRECISION = 9;
+ private static final int MIN_TIMESTAMP_PRECISION = 0;
+
+ // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
+ private static final int MAX_DECIMAL_PRECISION = 76;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickHouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "ClickHouse";
+ }
+
+ @Override
+ public Optional timestampPrecisionRange() {
+ return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
+ }
+
+ @Override
+ public Optional decimalPrecisionRange() {
+ return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
new file mode 100644
index 000000000..fcb27a8e6
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** clickhouse dialect factory */
+@Internal
+public class ClickHouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickHouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
deleted file mode 100644
index f696111b7..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.types.logical.RowType;
-
-/** */
-public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- @Override
- public String converterName() {
- return "Clickhouse";
- }
-
- public ClickhouseRowConvert(RowType rowType) {
- super(rowType);
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
new file mode 100644
index 000000000..0910fa0b9
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
@@ -0,0 +1,25 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+import org.apache.flink.connector.jdbc.databases.DatabaseTest;
+
+import org.testcontainers.containers.ClickHouseContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+/** clickhouse database for testing. */
+@Testcontainers
+public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
+
+ @Container
+ ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(
+ DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
+ .asCompatibleSubstituteFor("yandex/clickhouse-server"));
+
+ @Override
+ default DatabaseMetadata getMetadata() {
+ return new ClickHouseMetadata(CONTAINER);
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
new file mode 100644
index 000000000..38ac99280
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
@@ -0,0 +1,4 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+/** clickhouse images */
+public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
new file mode 100644
index 000000000..c5c83988a
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
@@ -0,0 +1,66 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+import javax.sql.XADataSource;
+
+/** clickhouse metadata */
+public class ClickHouseMetadata implements DatabaseMetadata {
+
+ private final String username;
+ private final String password;
+ private final String url;
+ private final String driver;
+ private final String version;
+ private final boolean xaEnabled;
+
+ public ClickHouseMetadata(ClickHouseContainer container) {
+ this(container, false);
+ }
+
+ public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
+ this.username = container.getUsername();
+ this.password = container.getPassword();
+ this.url = container.getJdbcUrl();
+ this.driver = container.getDriverClassName();
+ this.version = container.getDockerImageName();
+ this.xaEnabled = hasXaEnabled;
+ }
+
+ @Override
+ public String getJdbcUrl() {
+ return this.url;
+ }
+
+ @Override
+ public String getJdbcUrlWithCredentials() {
+ return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
+ }
+
+ @Override
+ public String getUsername() {
+ return this.username;
+ }
+
+ @Override
+ public String getPassword() {
+ return this.password;
+ }
+
+ @Override
+ public XADataSource buildXaDataSource() {
+ return null;
+ }
+
+ @Override
+ public String getDriverClass() {
+ return this.driver;
+ }
+
+ @Override
+ public String getVersion() {
+ return this.version;
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
new file mode 100644
index 000000000..7bcf6d267
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
@@ -0,0 +1,41 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
+public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
+
+ @Override
+ protected String testDialect() {
+ return "clickhouse";
+ }
+
+ @Override
+ protected List testData() {
+ return Arrays.asList(
+ createTestItem("CHAR"),
+ createTestItem("VARCHAR"),
+ createTestItem("BOOLEAN"),
+ createTestItem("TINYINT"),
+ createTestItem("SMALLINT"),
+ createTestItem("INTEGER"),
+ createTestItem("BIGINT"),
+ createTestItem("FLOAT"),
+ createTestItem("DOUBLE"),
+ createTestItem("DECIMAL(10, 4)"),
+ createTestItem("DECIMAL(38, 18)"),
+ createTestItem("DATE"),
+ createTestItem("TIMESTAMP(3)"),
+ createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
+ createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
+
+ // Not valid data
+ createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
+ createTestItem(
+ "VARBINARY(10)",
+ "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
new file mode 100644
index 000000000..aea4b14b1
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
@@ -0,0 +1,180 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.CollectionUtil;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/** The Table Source ITCase for {@link ClickHouseDialect}. */
+class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+ private static final String INPUT_TABLE = "clickhouse_test_table";
+
+ private static StreamExecutionEnvironment env;
+ private static TableEnvironment tEnv;
+
+ @BeforeAll
+ static void beforeAll() throws ClassNotFoundException, SQLException {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement statement = conn.createStatement()) {
+ statement.execute(
+ "CREATE TABLE "
+ + INPUT_TABLE
+ + "(\n"
+ + " user_id Int8,\n"
+ + " user_id_uint8 UInt8,\n"
+ + " user_id_int16 Int16,\n"
+ + " user_id_uint16 UInt16,\n"
+ + " user_id_int32 Int32,\n"
+ + " user_id_uint32 UInt32,\n"
+ + " user_id_int64 Int64,\n"
+ + " price_float32 Float32,\n"
+ + " price_float64 Float64,\n"
+ + " user_date Date,\n"
+ + " user_timestamp DateTime,\n"
+ + " decimal_column Decimal(3,1),\n"
+ + " decimal32_column Decimal32(4),\n"
+ + " decimal64_column Decimal64(4),\n"
+ + " bool_flag Bool,\n"
+ + " message String\n"
+ + ")\n"
+ + "ENGINE = MergeTree\n"
+ + "PRIMARY KEY (user_id, user_timestamp)");
+ statement.execute(
+ "insert into "
+ + INPUT_TABLE
+ + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
+ statement.execute(
+ "insert into "
+ + INPUT_TABLE
+ + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
+ }
+ }
+
+ @AfterAll
+ static void afterAll() throws Exception {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement statement = conn.createStatement()) {
+ statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
+ }
+ }
+
+ @BeforeEach
+ void before() throws Exception {
+ env = StreamExecutionEnvironment.getExecutionEnvironment();
+ tEnv = StreamTableEnvironment.create(env);
+ }
+
+ @Test
+ void testJdbcSource() throws Exception {
+ createFlinkTable();
+ Iterator collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
+ List result =
+ CollectionUtil.iteratorToList(collected).stream()
+ .map(Row::toString)
+ .sorted()
+ .collect(Collectors.toList());
+
+ List expected =
+ Stream.of(
+ "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
+ "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
+ .sorted()
+ .collect(Collectors.toList());
+ assertThat(result).isEqualTo(expected);
+
+ assert result.size() == 2;
+ }
+
+ @Test
+ void testProject() throws Exception {
+ createFlinkTable();
+ Iterator collected =
+ tEnv.executeSql(
+ "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
+ + INPUT_TABLE)
+ .collect();
+ List result =
+ CollectionUtil.iteratorToList(collected).stream()
+ .map(Row::toString)
+ .sorted()
+ .collect(Collectors.toList());
+
+ List expected =
+ Stream.of(
+ "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
+ "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
+ .sorted()
+ .collect(Collectors.toList());
+ assertThat(result).isEqualTo(expected);
+
+ assert result.size() == 2;
+ }
+
+ private void createFlinkTable() {
+ tEnv.executeSql(
+ "CREATE TABLE "
+ + INPUT_TABLE
+ + " ("
+ + "user_id TINYINT NOT NULL,"
+ + "user_id_uint8 SMALLINT NOT NULL,"
+ + "user_id_int16 SMALLINT NOT NULL,"
+ + "user_id_uint16 INTEGER NOT NULL,"
+ + "user_id_int32 INTEGER NOT NULL,"
+ + "user_id_uint32 BIGINT NOT NULL,"
+ + "user_id_int64 BIGINT NOT NULL,"
+ + "price_float32 FLOAT NOT NULL,"
+ + "price_float64 DOUBLE NOT NULL,"
+ + "user_date DATE NOT NULL,"
+ + "user_timestamp TIMESTAMP(2) NOT NULL,"
+ + "decimal_column DECIMAL(3,1) NOT NULL,"
+ + "decimal32_column DECIMAL(9,4) NOT NULL,"
+ + "decimal64_column DECIMAL(18,4) NOT NULL,"
+ + "bool_flag BOOLEAN NOT NULL,"
+ + "message VARCHAR NOT NULL"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrl()
+ + "',"
+ + " 'table-name'='"
+ + INPUT_TABLE
+ + "',"
+ + " 'username'='"
+ + getMetadata().getUsername()
+ + "',"
+ + " 'password'='"
+ + getMetadata().getPassword()
+ + "'"
+ + ")");
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
new file mode 100644
index 000000000..0d0de2e65
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
@@ -0,0 +1,300 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
+
+import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
+import static org.apache.flink.table.api.Expressions.row;
+
+/** The Table Sink ITCase for {@link ClickHouseDialect}. */
+class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+ public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
+ public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
+ public static final String OUTPUT_TABLE4 = "REAL_TABLE";
+
+ @BeforeAll
+ static void beforeAll() throws ClassNotFoundException, SQLException {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement stat = conn.createStatement()) {
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE1
+ + "(\n"
+ + " user_id Int8,\n"
+ + " user_id_int16 Int16,\n"
+ + " user_id_int32 Int32,\n"
+ + " user_id_int64 Int64,\n"
+ + " price_float32 Float32,\n"
+ + " price_float64 Float64,\n"
+ + " user_date Date,\n"
+ + " user_timestamp DateTime,\n"
+ + " decimal_column Decimal(3,1),\n"
+ + " decimal32_column Decimal32(4),\n"
+ + " decimal64_column Decimal64(4),\n"
+ + " bool_flag Bool,\n"
+ + " message String\n"
+ + ")\n"
+ + "ENGINE = MergeTree\n"
+ + "PRIMARY KEY (user_id, user_timestamp)");
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE3
+ + " (user_id Int8,"
+ + "message String,"
+ + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE4
+ + " (user_id Int8,"
+ + "real_data Float32,"
+ + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+ }
+ }
+
+ @AfterAll
+ static void afterAll() throws Exception {
+ TestValuesTableFactory.clearAllData();
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement stat = conn.createStatement()) {
+ stat.execute("DROP TABLE " + OUTPUT_TABLE1);
+ stat.execute("DROP TABLE " + OUTPUT_TABLE3);
+ stat.execute("DROP TABLE " + OUTPUT_TABLE4);
+ }
+ }
+
+ @Test
+ void testAllDataTypes() throws Exception {
+ StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+ env.getConfig().enableObjectReuse();
+ StreamTableEnvironment tEnv =
+ StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
+
+ tEnv.createTemporaryView(
+ "myTable",
+ tEnv.fromValues(
+ DataTypes.ROW(
+ DataTypes.FIELD("user_id", DataTypes.TINYINT()),
+ DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
+ DataTypes.FIELD("user_id_int32", DataTypes.INT()),
+ DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
+ DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
+ DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
+ DataTypes.FIELD("user_date", DataTypes.DATE()),
+ DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
+ DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
+ DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
+ DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
+ DataTypes.FIELD("message", DataTypes.STRING())),
+ row(
+ -128,
+ -32768,
+ -2147483648,
+ -9223372036854775808L,
+ -3.4e+38f,
+ -1.7e+308d,
+ "2023-01-01",
+ Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
+ -99.9f,
+ -99999.9999d,
+ true,
+ "this is a test message"),
+ row(
+ 127,
+ 32767,
+ 2147483647,
+ 9223372036854775807L,
+ 3.4e+38f,
+ 1.7e+308d,
+ "2023-01-02",
+ Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
+ 99.9f,
+ 99999.9999d,
+ false,
+ "this is a test message")));
+
+ tEnv.executeSql(
+ "CREATE TABLE "
+ + OUTPUT_TABLE1
+ + " ("
+ + "user_id TINYINT NOT NULL,"
+ + "user_id_int16 SMALLINT NOT NULL,"
+ + "user_id_int32 INTEGER NOT NULL,"
+ + "user_id_int64 BIGINT NOT NULL,"
+ + "price_float32 FLOAT NOT NULL,"
+ + "price_float64 DOUBLE NOT NULL,"
+ + "user_date DATE NOT NULL,"
+ + "user_timestamp TIMESTAMP(6) NOT NULL,"
+ + "decimal_column DECIMAL(3,1) NOT NULL,"
+ + "decimal32_column DECIMAL(9,4) NOT NULL,"
+ + "bool_flag BOOLEAN NOT NULL,"
+ + "message VARCHAR NOT NULL"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrl()
+ + "',"
+ + " 'table-name'='"
+ + OUTPUT_TABLE1
+ + "',"
+ + " 'username'='"
+ + getMetadata().getUsername()
+ + "',"
+ + " 'password'='"
+ + getMetadata().getPassword()
+ + "'"
+ + ")");
+
+ tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
+
+ check(
+ new Row[] {
+ Row.of(
+ -128,
+ -32768,
+ -2147483648,
+ -9223372036854775808L,
+ -3.4e+38f,
+ -1.7e+308d,
+ "2023-01-01",
+ Timestamp.valueOf("2023-01-01 15:35:12")
+ .toInstant()
+ .toString()
+ .replace("Z", ""),
+ -99.9f,
+ -99999.9999d,
+ true,
+ "this is a test message"),
+ Row.of(
+ 127,
+ 32767,
+ 2147483647,
+ 9223372036854775807L,
+ 3.4e+38f,
+ 1.7e+308d,
+ "2023-01-02",
+ Timestamp.valueOf("2023-01-01 16:35:23")
+ .toInstant()
+ .toString()
+ .replace("Z", ""),
+ 99.9f,
+ 99999.9999d,
+ false,
+ "this is a test message")
+ },
+ getMetadata().getJdbcUrlWithCredentials(),
+ OUTPUT_TABLE1,
+ new String[] {
+ "user_id",
+ "user_id_int16",
+ "user_id_int32",
+ "user_id_int64",
+ "price_float32",
+ "price_float64",
+ "user_date",
+ "user_timestamp",
+ "decimal_column",
+ "decimal32_column",
+ "bool_flag",
+ "message"
+ });
+ }
+
+ @Test
+ void testStreamSink() throws Exception {
+ StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+ env.getConfig().enableObjectReuse();
+ StreamTableEnvironment tEnv =
+ StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
+
+ tEnv.executeSql(
+ "CREATE TABLE upsertSink ("
+ + " real_data float"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrlWithCredentials()
+ + "',"
+ + " 'table-name'='"
+ + OUTPUT_TABLE4
+ + "'"
+ + ")");
+
+ tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
+ check(
+ new Row[] {Row.of(1.1f)},
+ getMetadata().getJdbcUrlWithCredentials(),
+ "REAL_TABLE",
+ new String[] {"real_data"});
+ }
+
+ @Test
+ void testBatchSink() throws Exception {
+ TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
+ tEnv.executeSql(
+ "CREATE TABLE USER_RESULT("
+ + "user_id BIGINT,"
+ + "message VARCHAR"
+ + ") WITH ( "
+ + "'connector' = 'jdbc',"
+ + "'url'='"
+ + getMetadata().getJdbcUrlWithCredentials()
+ + "',"
+ + "'table-name' = '"
+ + OUTPUT_TABLE3
+ + "',"
+ + "'sink.buffer-flush.max-rows' = '10',"
+ + "'sink.buffer-flush.interval' = '300ms',"
+ + "'sink.max-retries' = '4'"
+ + ")");
+
+ TableResult tableResult =
+ tEnv.executeSql(
+ "INSERT INTO USER_RESULT\n"
+ + "SELECT user_id, message "
+ + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
+ + "(42, 'Kim'), (1, 'Bob')) "
+ + "AS UserCountTable(user_id, message)");
+ tableResult.await();
+
+ check(
+ new Row[] {
+ Row.of("Bob", 1),
+ Row.of("Tom", 22),
+ Row.of("Kim", 42),
+ Row.of("Kim", 42),
+ Row.of("Bob", 1)
+ },
+ getMetadata().getJdbcUrlWithCredentials(),
+ OUTPUT_TABLE3,
+ new String[] {"message", "user_id"});
+ }
+}
From 223a9cb59517ce8ab06331bf1d84e2501631eebd Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Mon, 22 May 2023 21:50:09 +0800
Subject: [PATCH 03/24] [FLINK-32068] Support ClickHouse dialect in JDBC connector
---
.../connector/jdbc/dialect/clickhouse/ClickHouseDialect.java | 2 +-
.../jdbc/dialect/clickhouse/ClickHouseDialectFactory.java | 2 +-
.../connector/jdbc/databases/clickhouse/ClickHouseImages.java | 2 +-
.../connector/jdbc/databases/clickhouse/ClickHouseMetadata.java | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
index f4c359afe..df9fa9622 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
@@ -10,7 +10,7 @@
import java.util.Optional;
import java.util.Set;
-/** clickhouse dialect */
+/** clickhouse dialect. */
public class ClickHouseDialect extends AbstractDialect {
private static final long serialVersionUID = 1L;
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
index fcb27a8e6..7676c7fca 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
@@ -4,7 +4,7 @@
import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-/** clickhouse dialect factory */
+/** clickhouse dialect factory. */
@Internal
public class ClickHouseDialectFactory implements JdbcDialectFactory {
@Override
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
index 38ac99280..a12b13383 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
@@ -1,4 +1,4 @@
package org.apache.flink.connector.jdbc.databases.clickhouse;
-/** clickhouse images */
+/** clickhouse images. */
public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
index c5c83988a..3a292cc3c 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
@@ -6,7 +6,7 @@
import javax.sql.XADataSource;
-/** clickhouse metadata */
+/** clickhouse metadata. */
public class ClickHouseMetadata implements DatabaseMetadata {
private final String username;
From b0e7b37b7b1ee9eff9d14e2a42cf630b82b95414 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 13:09:07 +0800
Subject: [PATCH 04/24] [FLINK-32068] connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 -----
.../clickhouse/ClickHouseDialectFactory.java | 19 --
.../clickhouse/ClickHouseDatabase.java | 25 --
.../clickhouse/ClickHouseImages.java | 4 -
.../clickhouse/ClickHouseMetadata.java | 66 ----
.../clickhouse/ClickHouseDialectTypeTest.java | 41 ---
.../ClickHouseTableSourceITCase.java | 180 -----------
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ------------------
.../clickhouse/ClickhouseDatabase.java | 55 ++++
9 files changed, 55 insertions(+), 722 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
deleted file mode 100644
index df9fa9622..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** clickhouse dialect. */
-public class ClickHouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
- private static final int MAX_TIMESTAMP_PRECISION = 9;
- private static final int MIN_TIMESTAMP_PRECISION = 0;
-
- // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
- private static final int MAX_DECIMAL_PRECISION = 76;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickHouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "ClickHouse";
- }
-
- @Override
- public Optional timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Optional decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Set supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
deleted file mode 100644
index 7676c7fca..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** clickhouse dialect factory. */
-@Internal
-public class ClickHouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickHouseDialect();
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
deleted file mode 100644
index 0910fa0b9..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-import org.apache.flink.connector.jdbc.databases.DatabaseTest;
-
-import org.testcontainers.containers.ClickHouseContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.utility.DockerImageName;
-
-/** clickhouse database for testing. */
-@Testcontainers
-public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
-
- @Container
- ClickHouseContainer CONTAINER =
- new ClickHouseContainer(
- DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
- .asCompatibleSubstituteFor("yandex/clickhouse-server"));
-
- @Override
- default DatabaseMetadata getMetadata() {
- return new ClickHouseMetadata(CONTAINER);
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
deleted file mode 100644
index a12b13383..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-/** clickhouse images. */
-public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
deleted file mode 100644
index 3a292cc3c..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-import javax.sql.XADataSource;
-
-/** clickhouse metadata. */
-public class ClickHouseMetadata implements DatabaseMetadata {
-
- private final String username;
- private final String password;
- private final String url;
- private final String driver;
- private final String version;
- private final boolean xaEnabled;
-
- public ClickHouseMetadata(ClickHouseContainer container) {
- this(container, false);
- }
-
- public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
- this.username = container.getUsername();
- this.password = container.getPassword();
- this.url = container.getJdbcUrl();
- this.driver = container.getDriverClassName();
- this.version = container.getDockerImageName();
- this.xaEnabled = hasXaEnabled;
- }
-
- @Override
- public String getJdbcUrl() {
- return this.url;
- }
-
- @Override
- public String getJdbcUrlWithCredentials() {
- return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
- }
-
- @Override
- public String getUsername() {
- return this.username;
- }
-
- @Override
- public String getPassword() {
- return this.password;
- }
-
- @Override
- public XADataSource buildXaDataSource() {
- return null;
- }
-
- @Override
- public String getDriverClass() {
- return this.driver;
- }
-
- @Override
- public String getVersion() {
- return this.version;
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
deleted file mode 100644
index 7bcf6d267..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
-
-import java.util.Arrays;
-import java.util.List;
-
-/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
-public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
-
- @Override
- protected String testDialect() {
- return "clickhouse";
- }
-
- @Override
- protected List testData() {
- return Arrays.asList(
- createTestItem("CHAR"),
- createTestItem("VARCHAR"),
- createTestItem("BOOLEAN"),
- createTestItem("TINYINT"),
- createTestItem("SMALLINT"),
- createTestItem("INTEGER"),
- createTestItem("BIGINT"),
- createTestItem("FLOAT"),
- createTestItem("DOUBLE"),
- createTestItem("DECIMAL(10, 4)"),
- createTestItem("DECIMAL(38, 18)"),
- createTestItem("DATE"),
- createTestItem("TIMESTAMP(3)"),
- createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
- createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
-
- // Not valid data
- createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
- createTestItem(
- "VARBINARY(10)",
- "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
deleted file mode 100644
index aea4b14b1..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
+++ /dev/null
@@ -1,180 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-import org.apache.flink.util.CollectionUtil;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Iterator;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** The Table Source ITCase for {@link ClickHouseDialect}. */
-class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- private static final String INPUT_TABLE = "clickhouse_test_table";
-
- private static StreamExecutionEnvironment env;
- private static TableEnvironment tEnv;
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.execute(
- "CREATE TABLE "
- + INPUT_TABLE
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_uint8 UInt8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_uint16 UInt16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_uint32 UInt32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
- }
- }
-
- @BeforeEach
- void before() throws Exception {
- env = StreamExecutionEnvironment.getExecutionEnvironment();
- tEnv = StreamTableEnvironment.create(env);
- }
-
- @Test
- void testJdbcSource() throws Exception {
- createFlinkTable();
- Iterator collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- @Test
- void testProject() throws Exception {
- createFlinkTable();
- Iterator collected =
- tEnv.executeSql(
- "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
- + INPUT_TABLE)
- .collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- private void createFlinkTable() {
- tEnv.executeSql(
- "CREATE TABLE "
- + INPUT_TABLE
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_uint8 SMALLINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_uint16 INTEGER NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_uint32 BIGINT NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(2) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "decimal64_column DECIMAL(18,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + INPUT_TABLE
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
deleted file mode 100644
index 0d0de2e65..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
+++ /dev/null
@@ -1,300 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.EnvironmentSettings;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.table.planner.factories.TestValuesTableFactory;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Timestamp;
-
-import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
-import static org.apache.flink.table.api.Expressions.row;
-
-/** The Table Sink ITCase for {@link ClickHouseDialect}. */
-class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
- public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
- public static final String OUTPUT_TABLE4 = "REAL_TABLE";
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE3
- + " (user_id Int8,"
- + "message String,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE4
- + " (user_id Int8,"
- + "real_data Float32,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- TestValuesTableFactory.clearAllData();
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute("DROP TABLE " + OUTPUT_TABLE1);
- stat.execute("DROP TABLE " + OUTPUT_TABLE3);
- stat.execute("DROP TABLE " + OUTPUT_TABLE4);
- }
- }
-
- @Test
- void testAllDataTypes() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
-
- tEnv.createTemporaryView(
- "myTable",
- tEnv.fromValues(
- DataTypes.ROW(
- DataTypes.FIELD("user_id", DataTypes.TINYINT()),
- DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
- DataTypes.FIELD("user_id_int32", DataTypes.INT()),
- DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
- DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
- DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
- DataTypes.FIELD("user_date", DataTypes.DATE()),
- DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
- DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
- DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
- DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
- DataTypes.FIELD("message", DataTypes.STRING())),
- row(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- row(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")));
-
- tEnv.executeSql(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(6) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE1
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
-
- check(
- new Row[] {
- Row.of(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12")
- .toInstant()
- .toString()
- .replace("Z", ""),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- Row.of(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23")
- .toInstant()
- .toString()
- .replace("Z", ""),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE1,
- new String[] {
- "user_id",
- "user_id_int16",
- "user_id_int32",
- "user_id_int64",
- "price_float32",
- "price_float64",
- "user_date",
- "user_timestamp",
- "decimal_column",
- "decimal32_column",
- "bool_flag",
- "message"
- });
- }
-
- @Test
- void testStreamSink() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
-
- tEnv.executeSql(
- "CREATE TABLE upsertSink ("
- + " real_data float"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE4
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
- check(
- new Row[] {Row.of(1.1f)},
- getMetadata().getJdbcUrlWithCredentials(),
- "REAL_TABLE",
- new String[] {"real_data"});
- }
-
- @Test
- void testBatchSink() throws Exception {
- TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
- tEnv.executeSql(
- "CREATE TABLE USER_RESULT("
- + "user_id BIGINT,"
- + "message VARCHAR"
- + ") WITH ( "
- + "'connector' = 'jdbc',"
- + "'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + "'table-name' = '"
- + OUTPUT_TABLE3
- + "',"
- + "'sink.buffer-flush.max-rows' = '10',"
- + "'sink.buffer-flush.interval' = '300ms',"
- + "'sink.max-retries' = '4'"
- + ")");
-
- TableResult tableResult =
- tEnv.executeSql(
- "INSERT INTO USER_RESULT\n"
- + "SELECT user_id, message "
- + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
- + "(42, 'Kim'), (1, 'Bob')) "
- + "AS UserCountTable(user_id, message)");
- tableResult.await();
-
- check(
- new Row[] {
- Row.of("Bob", 1),
- Row.of("Tom", 22),
- Row.of("Kim", 42),
- Row.of("Kim", 42),
- Row.of("Bob", 1)
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE3,
- new String[] {"message", "user_id"});
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
new file mode 100644
index 000000000..c1b092ae7
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
+import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
+import org.apache.flink.util.FlinkRuntimeException;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+/** A Clickhouse database for testing. */
+public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
+
+ private static final ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
+
+ private static ClickHouseMetadata metadata;
+
+ public static ClickHouseMetadata getMetadata() {
+ if (!CONTAINER.isRunning()) {
+ throw new FlinkRuntimeException("Container is stopped.");
+ }
+ if (metadata == null) {
+ metadata = new ClickHouseMetadata(CONTAINER, false);
+ }
+ return metadata;
+ }
+
+ @Override
+ protected DatabaseMetadata startDatabase() throws Exception {
+ CONTAINER.start();
+ return getMetadata();
+ }
+
+ @Override
+ protected void stopDatabase() throws Exception {
+ CONTAINER.stop();
+ metadata = null;
+ }
+}
From d1f96df35a0543f5d794eaf2dd50575238efa616 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Wed, 12 Jul 2023 23:22:52 +0800
Subject: [PATCH 05/24] [FLINK-32068] connector jdbc support clickhouse
, support map types
---
.../clickhouse/ClickHouseDatabase.java | 2 +-
.../clickhouse/ClickhouseDatabase.java | 55 -------------------
2 files changed, 1 insertion(+), 56 deletions(-)
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
index e4d212cdd..50a3f56d4 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
@@ -23,7 +23,7 @@
import org.testcontainers.containers.ClickHouseContainer;
-/** Clickhouse database for testing. */
+/** A Clickhouse database for testing. */
public class ClickHouseDatabase extends DatabaseExtension implements ClickHouseImages {
private static final ClickHouseContainer CONTAINER =
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
deleted file mode 100644
index c1b092ae7..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
-import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
-import org.apache.flink.util.FlinkRuntimeException;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-/** A Clickhouse database for testing. */
-public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
-
- private static final ClickHouseContainer CONTAINER =
- new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
-
- private static ClickHouseMetadata metadata;
-
- public static ClickHouseMetadata getMetadata() {
- if (!CONTAINER.isRunning()) {
- throw new FlinkRuntimeException("Container is stopped.");
- }
- if (metadata == null) {
- metadata = new ClickHouseMetadata(CONTAINER, false);
- }
- return metadata;
- }
-
- @Override
- protected DatabaseMetadata startDatabase() throws Exception {
- CONTAINER.start();
- return getMetadata();
- }
-
- @Override
- protected void stopDatabase() throws Exception {
- CONTAINER.stop();
- metadata = null;
- }
-}
From 036a67fafa01f4d22a8f239b8a3252bdb38ce698 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 25 May 2023 01:12:10 +0800
Subject: [PATCH 06/24] [FLINK-32068] connector jdbc support clickhouse
---
.../clickhouse/ClickhouseDatabase.java | 55 +++++++++++++++++++
1 file changed, 55 insertions(+)
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
new file mode 100644
index 000000000..c1b092ae7
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
+import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
+import org.apache.flink.util.FlinkRuntimeException;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+/** A Clickhouse database for testing. */
+public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
+
+ private static final ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
+
+ private static ClickHouseMetadata metadata;
+
+ public static ClickHouseMetadata getMetadata() {
+ if (!CONTAINER.isRunning()) {
+ throw new FlinkRuntimeException("Container is stopped.");
+ }
+ if (metadata == null) {
+ metadata = new ClickHouseMetadata(CONTAINER, false);
+ }
+ return metadata;
+ }
+
+ @Override
+ protected DatabaseMetadata startDatabase() throws Exception {
+ CONTAINER.start();
+ return getMetadata();
+ }
+
+ @Override
+ protected void stopDatabase() throws Exception {
+ CONTAINER.stop();
+ metadata = null;
+ }
+}
From c8decbeafa89e38bbb29b85acf539d18b7e30e4c Mon Sep 17 00:00:00 2001
From: leishuiyu
Date: Sat, 13 May 2023 18:04:12 +0800
Subject: [PATCH 07/24] [FLINK-32068] jdbc support clickhouse
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 +++++++++++++++++++
.../clickhouse/ClickhouseDialectFactory.java | 19 +++++
.../converter/ClickhouseRowConvert.java | 19 +++++
3 files changed, 116 insertions(+)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
new file mode 100644
index 000000000..0222e0337
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
@@ -0,0 +1,78 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** */
+public class ClickhouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
+ private static final int MAX_TIMESTAMP_PRECISION = 6;
+ private static final int MIN_TIMESTAMP_PRECISION = 1;
+
+ // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
+ private static final int MAX_DECIMAL_PRECISION = 65;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickhouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "Clickhouse";
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.VARBINARY,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
new file mode 100644
index 000000000..4924e4877
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** */
+@Internal
+public class ClickhouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickhouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
new file mode 100644
index 000000000..f696111b7
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.internal.converter;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.table.types.logical.RowType;
+
+/** */
+public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public String converterName() {
+ return "Clickhouse";
+ }
+
+ public ClickhouseRowConvert(RowType rowType) {
+ super(rowType);
+ }
+}
From 6d4a0d54e196b20af46563bf299a53e58dc9d423 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 00:02:25 +0800
Subject: [PATCH 08/24] [FLINK-32068] connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 -------------------
.../clickhouse/ClickhouseDialectFactory.java | 19 -----
.../clickhouse/ClickhouseDatabase.java | 55 -------------
3 files changed, 152 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
deleted file mode 100644
index 0222e0337..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** */
-public class ClickhouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
- private static final int MAX_DECIMAL_PRECISION = 65;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickhouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "Clickhouse";
- }
-
- @Override
- public Set supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
deleted file mode 100644
index 4924e4877..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** */
-@Internal
-public class ClickhouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickhouseDialect();
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
deleted file mode 100644
index c1b092ae7..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
-import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
-import org.apache.flink.util.FlinkRuntimeException;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-/** A Clickhouse database for testing. */
-public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
-
- private static final ClickHouseContainer CONTAINER =
- new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
-
- private static ClickHouseMetadata metadata;
-
- public static ClickHouseMetadata getMetadata() {
- if (!CONTAINER.isRunning()) {
- throw new FlinkRuntimeException("Container is stopped.");
- }
- if (metadata == null) {
- metadata = new ClickHouseMetadata(CONTAINER, false);
- }
- return metadata;
- }
-
- @Override
- protected DatabaseMetadata startDatabase() throws Exception {
- CONTAINER.start();
- return getMetadata();
- }
-
- @Override
- protected void stopDatabase() throws Exception {
- CONTAINER.stop();
- metadata = null;
- }
-}
From 8b1df0421263345919b2a627abc9ab31038b2a5c Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 00:03:10 +0800
Subject: [PATCH 09/24] [FLINK-32068] connector jdbc support clickhouse
---
.../converter/ClickhouseRowConvert.java | 19 -------------------
1 file changed, 19 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
deleted file mode 100644
index f696111b7..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.types.logical.RowType;
-
-/** */
-public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- @Override
- public String converterName() {
- return "Clickhouse";
- }
-
- public ClickhouseRowConvert(RowType rowType) {
- super(rowType);
- }
-}
From 9c7544719aac3b67b69e0088ee3c4d9cd4a1fd74 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 18 May 2023 23:50:10 +0800
Subject: [PATCH 10/24] [FLINK-32068] connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 +++++
.../clickhouse/ClickHouseDialectFactory.java | 19 ++
.../converter/ClickHouseRowConvert.java | 85 +++++
.../clickhouse/ClickHouseDatabase.java | 25 ++
.../clickhouse/ClickHouseImages.java | 4 +
.../clickhouse/ClickHouseMetadata.java | 66 ++++
.../clickhouse/ClickHouseDialectTypeTest.java | 41 +++
.../ClickHouseTableSourceITCase.java | 180 +++++++++++
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ++++++++++++++++++
.../clickhouse/ClickHouseDatabase.java | 55 ----
10 files changed, 807 insertions(+), 55 deletions(-)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
new file mode 100644
index 000000000..f4c359afe
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
@@ -0,0 +1,87 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** clickhouse dialect */
+public class ClickHouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
+ private static final int MAX_TIMESTAMP_PRECISION = 9;
+ private static final int MIN_TIMESTAMP_PRECISION = 0;
+
+ // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
+ private static final int MAX_DECIMAL_PRECISION = 76;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickHouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "ClickHouse";
+ }
+
+ @Override
+ public Optional timestampPrecisionRange() {
+ return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
+ }
+
+ @Override
+ public Optional decimalPrecisionRange() {
+ return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
new file mode 100644
index 000000000..fcb27a8e6
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** clickhouse dialect factory */
+@Internal
+public class ClickHouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickHouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
new file mode 100644
index 000000000..a4ea4b2f1
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
@@ -0,0 +1,85 @@
+package org.apache.flink.connector.jdbc.internal.converter;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.table.data.DecimalData;
+import org.apache.flink.table.data.StringData;
+import org.apache.flink.table.data.TimestampData;
+import org.apache.flink.table.types.logical.DecimalType;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+
+import com.clickhouse.data.value.UnsignedByte;
+import com.clickhouse.data.value.UnsignedInteger;
+import com.clickhouse.data.value.UnsignedShort;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+
+/**
+ * Runtime converter that responsible to convert between JDBC object and Flink internal object for
+ * ClickHouse.
+ */
+public class ClickHouseRowConvert extends AbstractJdbcRowConverter {
+ @Override
+ public String converterName() {
+ return "ClickHouse";
+ }
+
+ public ClickHouseRowConvert(RowType rowType) {
+ super(rowType);
+ }
+
+ @Override
+ protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
+ switch (type.getTypeRoot()) {
+ case NULL:
+ return null;
+ case BOOLEAN:
+ case FLOAT:
+ case DOUBLE:
+ return val -> val;
+ case TINYINT:
+ return val -> ((Byte) val).byteValue();
+ case SMALLINT:
+ return val ->
+ val instanceof UnsignedByte
+ ? ((UnsignedByte) val).shortValue()
+ : ((Short) val).shortValue();
+ case INTEGER:
+ return val ->
+ val instanceof UnsignedShort
+ ? ((UnsignedShort) val).intValue()
+ : ((Integer) val).intValue();
+ case BIGINT:
+ return jdbcField -> {
+ if (jdbcField instanceof UnsignedInteger) {
+ return ((UnsignedInteger) jdbcField).longValue();
+ } else if (jdbcField instanceof Long) {
+ return ((Long) jdbcField).longValue();
+ }
+ // UINT64 is not supported,the uint64 range exceeds the long range
+ throw new UnsupportedOperationException("Unsupported type:" + type);
+ };
+ case DECIMAL:
+ final int precision = ((DecimalType) type).getPrecision();
+ final int scale = ((DecimalType) type).getScale();
+ return val ->
+ val instanceof BigInteger
+ ? DecimalData.fromBigDecimal(
+ new BigDecimal((BigInteger) val, 0), precision, scale)
+ : DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
+ case CHAR:
+ case VARCHAR:
+ return val -> StringData.fromString((String) val);
+ case DATE:
+ return val -> Long.valueOf(((LocalDate) val).toEpochDay()).intValue();
+ case TIMESTAMP_WITHOUT_TIME_ZONE:
+ case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
+ return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
+ default:
+ return super.createInternalConverter(type);
+ }
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
new file mode 100644
index 000000000..0910fa0b9
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
@@ -0,0 +1,25 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+import org.apache.flink.connector.jdbc.databases.DatabaseTest;
+
+import org.testcontainers.containers.ClickHouseContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+/** clickhouse database for testing. */
+@Testcontainers
+public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
+
+ @Container
+ ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(
+ DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
+ .asCompatibleSubstituteFor("yandex/clickhouse-server"));
+
+ @Override
+ default DatabaseMetadata getMetadata() {
+ return new ClickHouseMetadata(CONTAINER);
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
new file mode 100644
index 000000000..38ac99280
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
@@ -0,0 +1,4 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+/** clickhouse images */
+public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
new file mode 100644
index 000000000..c5c83988a
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
@@ -0,0 +1,66 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+import javax.sql.XADataSource;
+
+/** clickhouse metadata */
+public class ClickHouseMetadata implements DatabaseMetadata {
+
+ private final String username;
+ private final String password;
+ private final String url;
+ private final String driver;
+ private final String version;
+ private final boolean xaEnabled;
+
+ public ClickHouseMetadata(ClickHouseContainer container) {
+ this(container, false);
+ }
+
+ public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
+ this.username = container.getUsername();
+ this.password = container.getPassword();
+ this.url = container.getJdbcUrl();
+ this.driver = container.getDriverClassName();
+ this.version = container.getDockerImageName();
+ this.xaEnabled = hasXaEnabled;
+ }
+
+ @Override
+ public String getJdbcUrl() {
+ return this.url;
+ }
+
+ @Override
+ public String getJdbcUrlWithCredentials() {
+ return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
+ }
+
+ @Override
+ public String getUsername() {
+ return this.username;
+ }
+
+ @Override
+ public String getPassword() {
+ return this.password;
+ }
+
+ @Override
+ public XADataSource buildXaDataSource() {
+ return null;
+ }
+
+ @Override
+ public String getDriverClass() {
+ return this.driver;
+ }
+
+ @Override
+ public String getVersion() {
+ return this.version;
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
new file mode 100644
index 000000000..7bcf6d267
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
@@ -0,0 +1,41 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
+public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
+
+    @Override
+    protected String testDialect() {
+        return "clickhouse";
+    }
+
+    /**
+     * DDL column types exercised against the ClickHouse dialect; items carrying an error
+     * message are expected to be rejected with exactly that message.
+     */
+    @Override
+    protected List<TestItem> testData() {
+        return Arrays.asList(
+                createTestItem("CHAR"),
+                createTestItem("VARCHAR"),
+                createTestItem("BOOLEAN"),
+                createTestItem("TINYINT"),
+                createTestItem("SMALLINT"),
+                createTestItem("INTEGER"),
+                createTestItem("BIGINT"),
+                createTestItem("FLOAT"),
+                createTestItem("DOUBLE"),
+                createTestItem("DECIMAL(10, 4)"),
+                createTestItem("DECIMAL(38, 18)"),
+                createTestItem("DATE"),
+                createTestItem("TIMESTAMP(3)"),
+                createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
+                createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
+
+                // Not valid data
+                createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
+                createTestItem(
+                        "VARBINARY(10)",
+                        "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
+    }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
new file mode 100644
index 000000000..aea4b14b1
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
@@ -0,0 +1,180 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.CollectionUtil;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/** The Table Source ITCase for the ClickHouse dialect. */
+class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+    private static final String INPUT_TABLE = "clickhouse_test_table";
+
+    private static StreamExecutionEnvironment env;
+    private static TableEnvironment tEnv;
+
+    /** Creates the ClickHouse source table and inserts one row of min and one of max values. */
+    @BeforeAll
+    static void beforeAll() throws ClassNotFoundException, SQLException {
+        Class.forName(CONTAINER.getDriverClassName());
+        try (Connection conn =
+                DriverManager.getConnection(
+                        CONTAINER.getJdbcUrl(),
+                        CONTAINER.getUsername(),
+                        CONTAINER.getPassword());
+                Statement statement = conn.createStatement()) {
+            statement.execute(
+                    "CREATE TABLE "
+                            + INPUT_TABLE
+                            + "(\n"
+                            + "    user_id Int8,\n"
+                            + "    user_id_uint8 UInt8,\n"
+                            + "    user_id_int16 Int16,\n"
+                            + "    user_id_uint16 UInt16,\n"
+                            + "    user_id_int32 Int32,\n"
+                            + "    user_id_uint32 UInt32,\n"
+                            + "    user_id_int64 Int64,\n"
+                            + "    price_float32 Float32,\n"
+                            + "    price_float64 Float64,\n"
+                            + "    user_date Date,\n"
+                            + "    user_timestamp DateTime,\n"
+                            + "    decimal_column Decimal(3,1),\n"
+                            + "    decimal32_column Decimal32(4),\n"
+                            + "    decimal64_column Decimal64(4),\n"
+                            + "    bool_flag Bool,\n"
+                            + "    message String\n"
+                            + ")\n"
+                            + "ENGINE = MergeTree\n"
+                            + "PRIMARY KEY (user_id, user_timestamp)");
+            statement.execute(
+                    "insert into "
+                            + INPUT_TABLE
+                            + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
+            statement.execute(
+                    "insert into "
+                            + INPUT_TABLE
+                            + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
+        }
+    }
+
+    /** Drops the source table once the class is done. */
+    @AfterAll
+    static void afterAll() throws Exception {
+        Class.forName(CONTAINER.getDriverClassName());
+        try (Connection conn =
+                DriverManager.getConnection(
+                        CONTAINER.getJdbcUrl(),
+                        CONTAINER.getUsername(),
+                        CONTAINER.getPassword());
+                Statement statement = conn.createStatement()) {
+            statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
+        }
+    }
+
+    @BeforeEach
+    void before() throws Exception {
+        env = StreamExecutionEnvironment.getExecutionEnvironment();
+        tEnv = StreamTableEnvironment.create(env);
+    }
+
+    /** Reads every column and checks both boundary rows round-trip through the connector. */
+    @Test
+    void testJdbcSource() throws Exception {
+        createFlinkTable();
+        Iterator<Row> collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
+        List<String> result =
+                CollectionUtil.iteratorToList(collected).stream()
+                        .map(Row::toString)
+                        .sorted()
+                        .collect(Collectors.toList());
+
+        List<String> expected =
+                Stream.of(
+                                "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
+                                "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
+                        .sorted()
+                        .collect(Collectors.toList());
+        // Exact equality against the two expected rows also pins the result size.
+        assertThat(result).isEqualTo(expected);
+    }
+
+    /** Reads a projected subset of columns and checks the pushed-down projection result. */
+    @Test
+    void testProject() throws Exception {
+        createFlinkTable();
+        Iterator<Row> collected =
+                tEnv.executeSql(
+                                "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
+                                        + INPUT_TABLE)
+                        .collect();
+        List<String> result =
+                CollectionUtil.iteratorToList(collected).stream()
+                        .map(Row::toString)
+                        .sorted()
+                        .collect(Collectors.toList());
+
+        List<String> expected =
+                Stream.of(
+                                "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
+                                "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
+                        .sorted()
+                        .collect(Collectors.toList());
+        assertThat(result).isEqualTo(expected);
+    }
+
+    /** Registers the Flink-side JDBC table mapping the ClickHouse columns to Flink SQL types. */
+    private void createFlinkTable() {
+        tEnv.executeSql(
+                "CREATE TABLE "
+                        + INPUT_TABLE
+                        + " ("
+                        + "user_id TINYINT NOT NULL,"
+                        + "user_id_uint8 SMALLINT NOT NULL,"
+                        + "user_id_int16 SMALLINT NOT NULL,"
+                        + "user_id_uint16 INTEGER NOT NULL,"
+                        + "user_id_int32 INTEGER NOT NULL,"
+                        + "user_id_uint32 BIGINT NOT NULL,"
+                        + "user_id_int64 BIGINT NOT NULL,"
+                        + "price_float32 FLOAT NOT NULL,"
+                        + "price_float64 DOUBLE NOT NULL,"
+                        + "user_date DATE NOT NULL,"
+                        + "user_timestamp TIMESTAMP(2) NOT NULL,"
+                        + "decimal_column DECIMAL(3,1) NOT NULL,"
+                        + "decimal32_column DECIMAL(9,4) NOT NULL,"
+                        + "decimal64_column DECIMAL(18,4) NOT NULL,"
+                        + "bool_flag BOOLEAN NOT NULL,"
+                        + "message VARCHAR NOT NULL"
+                        + ") WITH ("
+                        + "  'connector'='jdbc',"
+                        + "  'url'='"
+                        + getMetadata().getJdbcUrl()
+                        + "',"
+                        + "  'table-name'='"
+                        + INPUT_TABLE
+                        + "',"
+                        + "  'username'='"
+                        + getMetadata().getUsername()
+                        + "',"
+                        + "  'password'='"
+                        + getMetadata().getPassword()
+                        + "'"
+                        + ")");
+    }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
new file mode 100644
index 000000000..0d0de2e65
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
@@ -0,0 +1,300 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
+
+import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
+import static org.apache.flink.table.api.Expressions.row;
+
+/** The Table Sink ITCase for the ClickHouse dialect. */
+class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+    // Physical ClickHouse target tables; created in beforeAll() and dropped in afterAll().
+    public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
+    public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
+    public static final String OUTPUT_TABLE4 = "REAL_TABLE";
+
+    /** Creates the three ClickHouse target tables used by the sink tests. */
+    @BeforeAll
+    static void beforeAll() throws ClassNotFoundException, SQLException {
+        Class.forName(CONTAINER.getDriverClassName());
+        try (Connection conn =
+                DriverManager.getConnection(
+                        CONTAINER.getJdbcUrl(),
+                        CONTAINER.getUsername(),
+                        CONTAINER.getPassword());
+                Statement stat = conn.createStatement()) {
+            stat.execute(
+                    "CREATE TABLE "
+                            + OUTPUT_TABLE1
+                            + "(\n"
+                            + "    user_id Int8,\n"
+                            + "    user_id_int16 Int16,\n"
+                            + "    user_id_int32 Int32,\n"
+                            + "    user_id_int64 Int64,\n"
+                            + "    price_float32 Float32,\n"
+                            + "    price_float64 Float64,\n"
+                            + "    user_date Date,\n"
+                            + "    user_timestamp DateTime,\n"
+                            + "    decimal_column Decimal(3,1),\n"
+                            + "    decimal32_column Decimal32(4),\n"
+                            + "    decimal64_column Decimal64(4),\n"
+                            + "    bool_flag Bool,\n"
+                            + "    message String\n"
+                            + ")\n"
+                            + "ENGINE = MergeTree\n"
+                            + "PRIMARY KEY (user_id, user_timestamp)");
+            stat.execute(
+                    "CREATE TABLE "
+                            + OUTPUT_TABLE3
+                            + " (user_id Int8,"
+                            + "message String,"
+                            + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+            stat.execute(
+                    "CREATE TABLE "
+                            + OUTPUT_TABLE4
+                            + " (user_id Int8,"
+                            + "real_data Float32,"
+                            + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+        }
+    }
+
+    /** Clears the test-values factory data and drops the target tables. */
+    @AfterAll
+    static void afterAll() throws Exception {
+        TestValuesTableFactory.clearAllData();
+        Class.forName(CONTAINER.getDriverClassName());
+        try (Connection conn =
+                DriverManager.getConnection(
+                        CONTAINER.getJdbcUrl(),
+                        CONTAINER.getUsername(),
+                        CONTAINER.getPassword());
+                Statement stat = conn.createStatement()) {
+            stat.execute("DROP TABLE " + OUTPUT_TABLE1);
+            stat.execute("DROP TABLE " + OUTPUT_TABLE3);
+            stat.execute("DROP TABLE " + OUTPUT_TABLE4);
+        }
+    }
+
+    /** Writes boundary-value rows covering every mapped column type, then checks the stored data. */
+    @Test
+    void testAllDataTypes() throws Exception {
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        env.getConfig().enableObjectReuse();
+        StreamTableEnvironment tEnv =
+                StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
+
+        tEnv.createTemporaryView(
+                "myTable",
+                tEnv.fromValues(
+                        DataTypes.ROW(
+                                DataTypes.FIELD("user_id", DataTypes.TINYINT()),
+                                DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
+                                DataTypes.FIELD("user_id_int32", DataTypes.INT()),
+                                DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
+                                DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
+                                DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
+                                DataTypes.FIELD("user_date", DataTypes.DATE()),
+                                DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
+                                DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
+                                DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
+                                DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
+                                DataTypes.FIELD("message", DataTypes.STRING())),
+                        row(
+                                -128,
+                                -32768,
+                                -2147483648,
+                                -9223372036854775808L,
+                                -3.4e+38f,
+                                -1.7e+308d,
+                                "2023-01-01",
+                                Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
+                                -99.9f,
+                                -99999.9999d,
+                                true,
+                                "this is a test message"),
+                        row(
+                                127,
+                                32767,
+                                2147483647,
+                                9223372036854775807L,
+                                3.4e+38f,
+                                1.7e+308d,
+                                "2023-01-02",
+                                Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
+                                99.9f,
+                                99999.9999d,
+                                false,
+                                "this is a test message")));
+
+        tEnv.executeSql(
+                "CREATE TABLE "
+                        + OUTPUT_TABLE1
+                        + " ("
+                        + "user_id TINYINT NOT NULL,"
+                        + "user_id_int16 SMALLINT NOT NULL,"
+                        + "user_id_int32 INTEGER NOT NULL,"
+                        + "user_id_int64 BIGINT NOT NULL,"
+                        + "price_float32 FLOAT NOT NULL,"
+                        + "price_float64 DOUBLE NOT NULL,"
+                        + "user_date DATE NOT NULL,"
+                        + "user_timestamp TIMESTAMP(6) NOT NULL,"
+                        + "decimal_column DECIMAL(3,1) NOT NULL,"
+                        + "decimal32_column DECIMAL(9,4) NOT NULL,"
+                        + "bool_flag BOOLEAN NOT NULL,"
+                        + "message VARCHAR NOT NULL"
+                        + ") WITH ("
+                        + "  'connector'='jdbc',"
+                        + "  'url'='"
+                        + getMetadata().getJdbcUrl()
+                        + "',"
+                        + "  'table-name'='"
+                        + OUTPUT_TABLE1
+                        + "',"
+                        + "  'username'='"
+                        + getMetadata().getUsername()
+                        + "',"
+                        + "  'password'='"
+                        + getMetadata().getPassword()
+                        + "'"
+                        + ")");
+
+        tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
+
+        // Timestamps are compared as Instant strings with the trailing 'Z' stripped.
+        check(
+                new Row[] {
+                    Row.of(
+                            -128,
+                            -32768,
+                            -2147483648,
+                            -9223372036854775808L,
+                            -3.4e+38f,
+                            -1.7e+308d,
+                            "2023-01-01",
+                            Timestamp.valueOf("2023-01-01 15:35:12")
+                                    .toInstant()
+                                    .toString()
+                                    .replace("Z", ""),
+                            -99.9f,
+                            -99999.9999d,
+                            true,
+                            "this is a test message"),
+                    Row.of(
+                            127,
+                            32767,
+                            2147483647,
+                            9223372036854775807L,
+                            3.4e+38f,
+                            1.7e+308d,
+                            "2023-01-02",
+                            Timestamp.valueOf("2023-01-01 16:35:23")
+                                    .toInstant()
+                                    .toString()
+                                    .replace("Z", ""),
+                            99.9f,
+                            99999.9999d,
+                            false,
+                            "this is a test message")
+                },
+                getMetadata().getJdbcUrlWithCredentials(),
+                OUTPUT_TABLE1,
+                new String[] {
+                    "user_id",
+                    "user_id_int16",
+                    "user_id_int32",
+                    "user_id_int64",
+                    "price_float32",
+                    "price_float64",
+                    "user_date",
+                    "user_timestamp",
+                    "decimal_column",
+                    "decimal32_column",
+                    "bool_flag",
+                    "message"
+                });
+    }
+
+    /** Streams a single FLOAT row into the sink and verifies it lands in REAL_TABLE. */
+    @Test
+    void testStreamSink() throws Exception {
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        env.getConfig().enableObjectReuse();
+        StreamTableEnvironment tEnv =
+                StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
+
+        tEnv.executeSql(
+                "CREATE TABLE upsertSink ("
+                        + "  real_data float"
+                        + ") WITH ("
+                        + "  'connector'='jdbc',"
+                        + "  'url'='"
+                        + getMetadata().getJdbcUrlWithCredentials()
+                        + "',"
+                        + "  'table-name'='"
+                        + OUTPUT_TABLE4
+                        + "'"
+                        + ")");
+
+        tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
+        check(
+                new Row[] {Row.of(1.1f)},
+                getMetadata().getJdbcUrlWithCredentials(),
+                "REAL_TABLE",
+                new String[] {"real_data"});
+    }
+
+    /** Batch-writes five rows through the buffered sink and verifies all arrive. */
+    @Test
+    void testBatchSink() throws Exception {
+        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
+        tEnv.executeSql(
+                "CREATE TABLE USER_RESULT("
+                        + "user_id BIGINT,"
+                        + "message VARCHAR"
+                        + ") WITH ( "
+                        + "'connector' = 'jdbc',"
+                        + "'url'='"
+                        + getMetadata().getJdbcUrlWithCredentials()
+                        + "',"
+                        + "'table-name' = '"
+                        + OUTPUT_TABLE3
+                        + "',"
+                        + "'sink.buffer-flush.max-rows' = '10',"
+                        + "'sink.buffer-flush.interval' = '300ms',"
+                        + "'sink.max-retries' = '4'"
+                        + ")");
+
+        TableResult tableResult =
+                tEnv.executeSql(
+                        "INSERT INTO USER_RESULT\n"
+                                + "SELECT user_id, message "
+                                + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
+                                + "(42, 'Kim'), (1, 'Bob')) "
+                                + "AS UserCountTable(user_id, message)");
+        tableResult.await();
+
+        // Duplicate rows are expected: the sink performs plain inserts, not upserts.
+        check(
+                new Row[] {
+                    Row.of("Bob", 1),
+                    Row.of("Tom", 22),
+                    Row.of("Kim", 42),
+                    Row.of("Kim", 42),
+                    Row.of("Bob", 1)
+                },
+                getMetadata().getJdbcUrlWithCredentials(),
+                OUTPUT_TABLE3,
+                new String[] {"message", "user_id"});
+    }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
deleted file mode 100644
index 50a3f56d4..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
-import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
-import org.apache.flink.util.FlinkRuntimeException;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-/** A Clickhouse database for testing. */
-public class ClickHouseDatabase extends DatabaseExtension implements ClickHouseImages {
-
- private static final ClickHouseContainer CONTAINER =
- new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
-
- private static ClickHouseMetadata metadata;
-
- public static ClickHouseMetadata getMetadata() {
- if (!CONTAINER.isRunning()) {
- throw new FlinkRuntimeException("Container is stopped.");
- }
- if (metadata == null) {
- metadata = new ClickHouseMetadata(CONTAINER, false);
- }
- return metadata;
- }
-
- @Override
- protected DatabaseMetadata startDatabase() throws Exception {
- CONTAINER.start();
- return getMetadata();
- }
-
- @Override
- protected void stopDatabase() throws Exception {
- CONTAINER.stop();
- metadata = null;
- }
-}
From 497549238ca01ddd414bd6aa6a0a93c7b539a0d6 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Mon, 22 May 2023 21:50:09 +0800
Subject: [PATCH 11/24] [FLINK-32068] connector jdbc support clickhouse
---
.../connector/jdbc/dialect/clickhouse/ClickHouseDialect.java | 2 +-
.../jdbc/dialect/clickhouse/ClickHouseDialectFactory.java | 2 +-
.../connector/jdbc/databases/clickhouse/ClickHouseImages.java | 2 +-
.../connector/jdbc/databases/clickhouse/ClickHouseMetadata.java | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
index f4c359afe..df9fa9622 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
@@ -10,7 +10,7 @@
import java.util.Optional;
import java.util.Set;
-/** clickhouse dialect */
+/** clickhouse dialect. */
public class ClickHouseDialect extends AbstractDialect {
private static final long serialVersionUID = 1L;
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
index fcb27a8e6..7676c7fca 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
@@ -4,7 +4,7 @@
import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-/** clickhouse dialect factory */
+/** clickhouse dialect factory. */
@Internal
public class ClickHouseDialectFactory implements JdbcDialectFactory {
@Override
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
index 38ac99280..a12b13383 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
@@ -1,4 +1,4 @@
package org.apache.flink.connector.jdbc.databases.clickhouse;
-/** clickhouse images */
+/** clickhouse images. */
public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
index c5c83988a..3a292cc3c 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
@@ -6,7 +6,7 @@
import javax.sql.XADataSource;
-/** clickhouse metadata */
+/** clickhouse metadata. */
public class ClickHouseMetadata implements DatabaseMetadata {
private final String username;
From 9ee3d1f7214c01e8d48fb707ef91db78ae185df8 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 25 May 2023 01:12:10 +0800
Subject: [PATCH 12/24] [FLINK-32068]connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 -----
.../clickhouse/ClickHouseDialectFactory.java | 19 --
.../clickhouse/ClickHouseDatabase.java | 25 --
.../clickhouse/ClickHouseImages.java | 4 -
.../clickhouse/ClickHouseMetadata.java | 66 ----
.../clickhouse/ClickHouseDialectTypeTest.java | 41 ---
.../ClickHouseTableSourceITCase.java | 180 -----------
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ------------------
.../clickhouse/ClickhouseDatabase.java | 55 ++++
9 files changed, 55 insertions(+), 722 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
deleted file mode 100644
index df9fa9622..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** clickhouse dialect. */
-public class ClickHouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
- private static final int MAX_TIMESTAMP_PRECISION = 9;
- private static final int MIN_TIMESTAMP_PRECISION = 0;
-
- // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
- private static final int MAX_DECIMAL_PRECISION = 76;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickHouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "ClickHouse";
- }
-
- @Override
- public Optional timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Optional decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Set supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
deleted file mode 100644
index 7676c7fca..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** clickhouse dialect factory. */
-@Internal
-public class ClickHouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickHouseDialect();
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
deleted file mode 100644
index 0910fa0b9..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-import org.apache.flink.connector.jdbc.databases.DatabaseTest;
-
-import org.testcontainers.containers.ClickHouseContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.utility.DockerImageName;
-
-/** clickhouse database for testing. */
-@Testcontainers
-public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
-
- @Container
- ClickHouseContainer CONTAINER =
- new ClickHouseContainer(
- DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
- .asCompatibleSubstituteFor("yandex/clickhouse-server"));
-
- @Override
- default DatabaseMetadata getMetadata() {
- return new ClickHouseMetadata(CONTAINER);
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
deleted file mode 100644
index a12b13383..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-/** clickhouse images. */
-public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
deleted file mode 100644
index 3a292cc3c..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-import javax.sql.XADataSource;
-
-/** clickhouse metadata. */
-public class ClickHouseMetadata implements DatabaseMetadata {
-
- private final String username;
- private final String password;
- private final String url;
- private final String driver;
- private final String version;
- private final boolean xaEnabled;
-
- public ClickHouseMetadata(ClickHouseContainer container) {
- this(container, false);
- }
-
- public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
- this.username = container.getUsername();
- this.password = container.getPassword();
- this.url = container.getJdbcUrl();
- this.driver = container.getDriverClassName();
- this.version = container.getDockerImageName();
- this.xaEnabled = hasXaEnabled;
- }
-
- @Override
- public String getJdbcUrl() {
- return this.url;
- }
-
- @Override
- public String getJdbcUrlWithCredentials() {
- return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
- }
-
- @Override
- public String getUsername() {
- return this.username;
- }
-
- @Override
- public String getPassword() {
- return this.password;
- }
-
- @Override
- public XADataSource buildXaDataSource() {
- return null;
- }
-
- @Override
- public String getDriverClass() {
- return this.driver;
- }
-
- @Override
- public String getVersion() {
- return this.version;
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
deleted file mode 100644
index 7bcf6d267..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
-
-import java.util.Arrays;
-import java.util.List;
-
-/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
-public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
-
- @Override
- protected String testDialect() {
- return "clickhouse";
- }
-
- @Override
- protected List testData() {
- return Arrays.asList(
- createTestItem("CHAR"),
- createTestItem("VARCHAR"),
- createTestItem("BOOLEAN"),
- createTestItem("TINYINT"),
- createTestItem("SMALLINT"),
- createTestItem("INTEGER"),
- createTestItem("BIGINT"),
- createTestItem("FLOAT"),
- createTestItem("DOUBLE"),
- createTestItem("DECIMAL(10, 4)"),
- createTestItem("DECIMAL(38, 18)"),
- createTestItem("DATE"),
- createTestItem("TIMESTAMP(3)"),
- createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
- createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
-
- // Not valid data
- createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
- createTestItem(
- "VARBINARY(10)",
- "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
deleted file mode 100644
index aea4b14b1..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
+++ /dev/null
@@ -1,180 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-import org.apache.flink.util.CollectionUtil;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Iterator;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** The Table Source ITCase for {@link ClickHouseDialect}. */
-class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- private static final String INPUT_TABLE = "clickhouse_test_table";
-
- private static StreamExecutionEnvironment env;
- private static TableEnvironment tEnv;
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.execute(
- "CREATE TABLE "
- + INPUT_TABLE
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_uint8 UInt8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_uint16 UInt16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_uint32 UInt32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
- }
- }
-
- @BeforeEach
- void before() throws Exception {
- env = StreamExecutionEnvironment.getExecutionEnvironment();
- tEnv = StreamTableEnvironment.create(env);
- }
-
- @Test
- void testJdbcSource() throws Exception {
- createFlinkTable();
- Iterator collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- @Test
- void testProject() throws Exception {
- createFlinkTable();
- Iterator collected =
- tEnv.executeSql(
- "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
- + INPUT_TABLE)
- .collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- private void createFlinkTable() {
- tEnv.executeSql(
- "CREATE TABLE "
- + INPUT_TABLE
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_uint8 SMALLINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_uint16 INTEGER NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_uint32 BIGINT NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(2) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "decimal64_column DECIMAL(18,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + INPUT_TABLE
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
deleted file mode 100644
index 0d0de2e65..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
+++ /dev/null
@@ -1,300 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.EnvironmentSettings;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.table.planner.factories.TestValuesTableFactory;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Timestamp;
-
-import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
-import static org.apache.flink.table.api.Expressions.row;
-
-/** The Table Sink ITCase for {@link ClickHouseDialect}. */
-class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
- public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
- public static final String OUTPUT_TABLE4 = "REAL_TABLE";
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE3
- + " (user_id Int8,"
- + "message String,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE4
- + " (user_id Int8,"
- + "real_data Float32,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- TestValuesTableFactory.clearAllData();
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute("DROP TABLE " + OUTPUT_TABLE1);
- stat.execute("DROP TABLE " + OUTPUT_TABLE3);
- stat.execute("DROP TABLE " + OUTPUT_TABLE4);
- }
- }
-
- @Test
- void testAllDataTypes() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
-
- tEnv.createTemporaryView(
- "myTable",
- tEnv.fromValues(
- DataTypes.ROW(
- DataTypes.FIELD("user_id", DataTypes.TINYINT()),
- DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
- DataTypes.FIELD("user_id_int32", DataTypes.INT()),
- DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
- DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
- DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
- DataTypes.FIELD("user_date", DataTypes.DATE()),
- DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
- DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
- DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
- DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
- DataTypes.FIELD("message", DataTypes.STRING())),
- row(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- row(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")));
-
- tEnv.executeSql(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(6) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE1
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
-
- check(
- new Row[] {
- Row.of(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12")
- .toInstant()
- .toString()
- .replace("Z", ""),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- Row.of(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23")
- .toInstant()
- .toString()
- .replace("Z", ""),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE1,
- new String[] {
- "user_id",
- "user_id_int16",
- "user_id_int32",
- "user_id_int64",
- "price_float32",
- "price_float64",
- "user_date",
- "user_timestamp",
- "decimal_column",
- "decimal32_column",
- "bool_flag",
- "message"
- });
- }
-
- @Test
- void testStreamSink() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
-
- tEnv.executeSql(
- "CREATE TABLE upsertSink ("
- + " real_data float"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE4
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
- check(
- new Row[] {Row.of(1.1f)},
- getMetadata().getJdbcUrlWithCredentials(),
- "REAL_TABLE",
- new String[] {"real_data"});
- }
-
- @Test
- void testBatchSink() throws Exception {
- TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
- tEnv.executeSql(
- "CREATE TABLE USER_RESULT("
- + "user_id BIGINT,"
- + "message VARCHAR"
- + ") WITH ( "
- + "'connector' = 'jdbc',"
- + "'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + "'table-name' = '"
- + OUTPUT_TABLE3
- + "',"
- + "'sink.buffer-flush.max-rows' = '10',"
- + "'sink.buffer-flush.interval' = '300ms',"
- + "'sink.max-retries' = '4'"
- + ")");
-
- TableResult tableResult =
- tEnv.executeSql(
- "INSERT INTO USER_RESULT\n"
- + "SELECT user_id, message "
- + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
- + "(42, 'Kim'), (1, 'Bob')) "
- + "AS UserCountTable(user_id, message)");
- tableResult.await();
-
- check(
- new Row[] {
- Row.of("Bob", 1),
- Row.of("Tom", 22),
- Row.of("Kim", 42),
- Row.of("Kim", 42),
- Row.of("Bob", 1)
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE3,
- new String[] {"message", "user_id"});
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
new file mode 100644
index 000000000..c1b092ae7
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.connector.jdbc.testutils.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.testutils.DatabaseExtension;
+import org.apache.flink.connector.jdbc.testutils.DatabaseMetadata;
+import org.apache.flink.util.FlinkRuntimeException;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+/** A Clickhouse database for testing. */
+public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
+
+ private static final ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
+
+ private static ClickHouseMetadata metadata;
+
+ public static ClickHouseMetadata getMetadata() {
+ if (!CONTAINER.isRunning()) {
+ throw new FlinkRuntimeException("Container is stopped.");
+ }
+ if (metadata == null) {
+ metadata = new ClickHouseMetadata(CONTAINER, false);
+ }
+ return metadata;
+ }
+
+ @Override
+ protected DatabaseMetadata startDatabase() throws Exception {
+ CONTAINER.start();
+ return getMetadata();
+ }
+
+ @Override
+ protected void stopDatabase() throws Exception {
+ CONTAINER.stop();
+ metadata = null;
+ }
+}
From 924c107128ed78fdab22f30843659a0ef71e9b83 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 25 May 2023 21:40:26 +0800
Subject: [PATCH 13/24] [FLINK-32068] Connector jdbc support clickhouse
---
.../converter/ClickHouseRowConvert.java | 85 -------------------
1 file changed, 85 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
deleted file mode 100644
index a4ea4b2f1..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.data.DecimalData;
-import org.apache.flink.table.data.StringData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.types.logical.DecimalType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-
-import com.clickhouse.data.value.UnsignedByte;
-import com.clickhouse.data.value.UnsignedInteger;
-import com.clickhouse.data.value.UnsignedShort;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-
-/**
- * Runtime converter that responsible to convert between JDBC object and Flink internal object for
- * ClickHouse.
- */
-public class ClickHouseRowConvert extends AbstractJdbcRowConverter {
- @Override
- public String converterName() {
- return "ClickHouse";
- }
-
- public ClickHouseRowConvert(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case NULL:
- return null;
- case BOOLEAN:
- case FLOAT:
- case DOUBLE:
- return val -> val;
- case TINYINT:
- return val -> ((Byte) val).byteValue();
- case SMALLINT:
- return val ->
- val instanceof UnsignedByte
- ? ((UnsignedByte) val).shortValue()
- : ((Short) val).shortValue();
- case INTEGER:
- return val ->
- val instanceof UnsignedShort
- ? ((UnsignedShort) val).intValue()
- : ((Integer) val).intValue();
- case BIGINT:
- return jdbcField -> {
- if (jdbcField instanceof UnsignedInteger) {
- return ((UnsignedInteger) jdbcField).longValue();
- } else if (jdbcField instanceof Long) {
- return ((Long) jdbcField).longValue();
- }
- // UINT64 is not supported,the uint64 range exceeds the long range
- throw new UnsupportedOperationException("Unsupported type:" + type);
- };
- case DECIMAL:
- final int precision = ((DecimalType) type).getPrecision();
- final int scale = ((DecimalType) type).getScale();
- return val ->
- val instanceof BigInteger
- ? DecimalData.fromBigDecimal(
- new BigDecimal((BigInteger) val, 0), precision, scale)
- : DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
- case CHAR:
- case VARCHAR:
- return val -> StringData.fromString((String) val);
- case DATE:
- return val -> Long.valueOf(((LocalDate) val).toEpochDay()).intValue();
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
- default:
- return super.createInternalConverter(type);
- }
- }
-}
From a56e0986ef187fd9c700c932db9db7474cd23adc Mon Sep 17 00:00:00 2001
From: leishuiyu
Date: Sat, 13 May 2023 18:04:12 +0800
Subject: [PATCH 14/24] [FLINK-32068] jdbc support clickhouse
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 +++++++++++++++++++
.../clickhouse/ClickhouseDialectFactory.java | 19 +++++
.../converter/ClickhouseRowConvert.java | 19 +++++
3 files changed, 116 insertions(+)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
new file mode 100644
index 000000000..0222e0337
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
@@ -0,0 +1,78 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** */
+public class ClickhouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
+ private static final int MAX_TIMESTAMP_PRECISION = 6;
+ private static final int MIN_TIMESTAMP_PRECISION = 1;
+
+ // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
+ private static final int MAX_DECIMAL_PRECISION = 65;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickhouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "Clickhouse";
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.VARBINARY,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
new file mode 100644
index 000000000..4924e4877
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** */
+@Internal
+public class ClickhouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickhouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
new file mode 100644
index 000000000..f696111b7
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.internal.converter;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.table.types.logical.RowType;
+
+/** */
+public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public String converterName() {
+ return "Clickhouse";
+ }
+
+ public ClickhouseRowConvert(RowType rowType) {
+ super(rowType);
+ }
+}
From 7caac6fe6367cdb5a28f31ff71de85cf43f7a35d Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 00:02:25 +0800
Subject: [PATCH 15/24] [FLINK-32068] Connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 -------------------
.../clickhouse/ClickhouseDialectFactory.java | 19 -----
...eDatabase.java => ClickHouseDatabase.java} | 2 +-
3 files changed, 1 insertion(+), 98 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
rename flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/{ClickhouseDatabase.java => ClickHouseDatabase.java} (96%)
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
deleted file mode 100644
index 0222e0337..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** */
-public class ClickhouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
- private static final int MAX_DECIMAL_PRECISION = 65;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickhouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "Clickhouse";
- }
-
- @Override
- public Set supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
deleted file mode 100644
index 4924e4877..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** */
-@Internal
-public class ClickhouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickhouseDialect();
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
similarity index 96%
rename from flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
rename to flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
index c1b092ae7..50a3f56d4 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickhouseDatabase.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/testutils/databases/clickhouse/ClickHouseDatabase.java
@@ -24,7 +24,7 @@
import org.testcontainers.containers.ClickHouseContainer;
/** A Clickhouse database for testing. */
-public class ClickhouseDatabase extends DatabaseExtension implements ClickHouseImages {
+public class ClickHouseDatabase extends DatabaseExtension implements ClickHouseImages {
private static final ClickHouseContainer CONTAINER =
new ClickHouseContainer(CLICKHOUSE_IMAGE_23);
From 8f1354006a61b48e745186a98e72c32316d8662c Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 00:03:10 +0800
Subject: [PATCH 16/24] [FLINK-32068] connector jdbc support clickhouse
---
.../converter/ClickhouseRowConvert.java | 19 -------------------
1 file changed, 19 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
deleted file mode 100644
index f696111b7..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.types.logical.RowType;
-
-/** */
-public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- @Override
- public String converterName() {
- return "Clickhouse";
- }
-
- public ClickhouseRowConvert(RowType rowType) {
- super(rowType);
- }
-}
From cd9ba3fabf4089e127ae1a607c0d6d374f0f7519 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 18 May 2023 23:50:10 +0800
Subject: [PATCH 17/24] [FLINK-32068] connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 +++++
.../clickhouse/ClickHouseDialectFactory.java | 19 ++
.../converter/ClickHouseRowConvert.java | 85 +++++
.../clickhouse/ClickHouseDatabase.java | 25 ++
.../clickhouse/ClickHouseImages.java | 4 +
.../clickhouse/ClickHouseMetadata.java | 66 ++++
.../clickhouse/ClickHouseDialectTypeTest.java | 41 +++
.../ClickHouseTableSourceITCase.java | 180 +++++++++++
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ++++++++++++++++++
9 files changed, 807 insertions(+)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
create mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
new file mode 100644
index 000000000..f4c359afe
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
@@ -0,0 +1,87 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** clickhouse dialect */
+public class ClickHouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
+ private static final int MAX_TIMESTAMP_PRECISION = 9;
+ private static final int MIN_TIMESTAMP_PRECISION = 0;
+
+ // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
+ // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
+ private static final int MAX_DECIMAL_PRECISION = 76;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickHouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "ClickHouse";
+ }
+
+ @Override
+ public Optional timestampPrecisionRange() {
+ return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
+ }
+
+ @Override
+ public Optional decimalPrecisionRange() {
+ return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
new file mode 100644
index 000000000..fcb27a8e6
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** clickhouse dialect factory */
+@Internal
+public class ClickHouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickHouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
new file mode 100644
index 000000000..a4ea4b2f1
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
@@ -0,0 +1,85 @@
+package org.apache.flink.connector.jdbc.internal.converter;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.table.data.DecimalData;
+import org.apache.flink.table.data.StringData;
+import org.apache.flink.table.data.TimestampData;
+import org.apache.flink.table.types.logical.DecimalType;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+
+import com.clickhouse.data.value.UnsignedByte;
+import com.clickhouse.data.value.UnsignedInteger;
+import com.clickhouse.data.value.UnsignedShort;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+
+/**
+ * Runtime converter that responsible to convert between JDBC object and Flink internal object for
+ * ClickHouse.
+ */
+public class ClickHouseRowConvert extends AbstractJdbcRowConverter {
+ @Override
+ public String converterName() {
+ return "ClickHouse";
+ }
+
+ public ClickHouseRowConvert(RowType rowType) {
+ super(rowType);
+ }
+
+ @Override
+ protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
+ switch (type.getTypeRoot()) {
+ case NULL:
+ return null;
+ case BOOLEAN:
+ case FLOAT:
+ case DOUBLE:
+ return val -> val;
+ case TINYINT:
+ return val -> ((Byte) val).byteValue();
+ case SMALLINT:
+ return val ->
+ val instanceof UnsignedByte
+ ? ((UnsignedByte) val).shortValue()
+ : ((Short) val).shortValue();
+ case INTEGER:
+ return val ->
+ val instanceof UnsignedShort
+ ? ((UnsignedShort) val).intValue()
+ : ((Integer) val).intValue();
+ case BIGINT:
+ return jdbcField -> {
+ if (jdbcField instanceof UnsignedInteger) {
+ return ((UnsignedInteger) jdbcField).longValue();
+ } else if (jdbcField instanceof Long) {
+ return ((Long) jdbcField).longValue();
+ }
+ // UINT64 is not supported,the uint64 range exceeds the long range
+ throw new UnsupportedOperationException("Unsupported type:" + type);
+ };
+ case DECIMAL:
+ final int precision = ((DecimalType) type).getPrecision();
+ final int scale = ((DecimalType) type).getScale();
+ return val ->
+ val instanceof BigInteger
+ ? DecimalData.fromBigDecimal(
+ new BigDecimal((BigInteger) val, 0), precision, scale)
+ : DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
+ case CHAR:
+ case VARCHAR:
+ return val -> StringData.fromString((String) val);
+ case DATE:
+ return val -> Long.valueOf(((LocalDate) val).toEpochDay()).intValue();
+ case TIMESTAMP_WITHOUT_TIME_ZONE:
+ case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
+ return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
+ default:
+ return super.createInternalConverter(type);
+ }
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
new file mode 100644
index 000000000..0910fa0b9
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
@@ -0,0 +1,25 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+import org.apache.flink.connector.jdbc.databases.DatabaseTest;
+
+import org.testcontainers.containers.ClickHouseContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+/** clickhouse database for testing. */
+@Testcontainers
+public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
+
+ @Container
+ ClickHouseContainer CONTAINER =
+ new ClickHouseContainer(
+ DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
+ .asCompatibleSubstituteFor("yandex/clickhouse-server"));
+
+ @Override
+ default DatabaseMetadata getMetadata() {
+ return new ClickHouseMetadata(CONTAINER);
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
new file mode 100644
index 000000000..38ac99280
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
@@ -0,0 +1,4 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+/** clickhouse images */
+public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
new file mode 100644
index 000000000..c5c83988a
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
@@ -0,0 +1,66 @@
+package org.apache.flink.connector.jdbc.databases.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
+
+import org.testcontainers.containers.ClickHouseContainer;
+
+import javax.sql.XADataSource;
+
+/** clickhouse metadata */
+public class ClickHouseMetadata implements DatabaseMetadata {
+
+ private final String username;
+ private final String password;
+ private final String url;
+ private final String driver;
+ private final String version;
+ private final boolean xaEnabled;
+
+ public ClickHouseMetadata(ClickHouseContainer container) {
+ this(container, false);
+ }
+
+ public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
+ this.username = container.getUsername();
+ this.password = container.getPassword();
+ this.url = container.getJdbcUrl();
+ this.driver = container.getDriverClassName();
+ this.version = container.getDockerImageName();
+ this.xaEnabled = hasXaEnabled;
+ }
+
+ @Override
+ public String getJdbcUrl() {
+ return this.url;
+ }
+
+ @Override
+ public String getJdbcUrlWithCredentials() {
+ return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
+ }
+
+ @Override
+ public String getUsername() {
+ return this.username;
+ }
+
+ @Override
+ public String getPassword() {
+ return this.password;
+ }
+
+ @Override
+ public XADataSource buildXaDataSource() {
+ return null;
+ }
+
+ @Override
+ public String getDriverClass() {
+ return this.driver;
+ }
+
+ @Override
+ public String getVersion() {
+ return this.version;
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
new file mode 100644
index 000000000..7bcf6d267
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
@@ -0,0 +1,41 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
+public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
+
+ @Override
+ protected String testDialect() {
+ return "clickhouse";
+ }
+
+ @Override
+ protected List testData() {
+ return Arrays.asList(
+ createTestItem("CHAR"),
+ createTestItem("VARCHAR"),
+ createTestItem("BOOLEAN"),
+ createTestItem("TINYINT"),
+ createTestItem("SMALLINT"),
+ createTestItem("INTEGER"),
+ createTestItem("BIGINT"),
+ createTestItem("FLOAT"),
+ createTestItem("DOUBLE"),
+ createTestItem("DECIMAL(10, 4)"),
+ createTestItem("DECIMAL(38, 18)"),
+ createTestItem("DATE"),
+ createTestItem("TIMESTAMP(3)"),
+ createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
+ createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
+
+ // Not valid data
+ createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
+ createTestItem(
+ "VARBINARY(10)",
+ "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
new file mode 100644
index 000000000..aea4b14b1
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
@@ -0,0 +1,180 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.CollectionUtil;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/** The Table Source ITCase for {@link ClickHouseDialect}. */
+class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+ private static final String INPUT_TABLE = "clickhouse_test_table";
+
+ private static StreamExecutionEnvironment env;
+ private static TableEnvironment tEnv;
+
+ @BeforeAll
+ static void beforeAll() throws ClassNotFoundException, SQLException {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement statement = conn.createStatement()) {
+ statement.execute(
+ "CREATE TABLE "
+ + INPUT_TABLE
+ + "(\n"
+ + " user_id Int8,\n"
+ + " user_id_uint8 UInt8,\n"
+ + " user_id_int16 Int16,\n"
+ + " user_id_uint16 UInt16,\n"
+ + " user_id_int32 Int32,\n"
+ + " user_id_uint32 UInt32,\n"
+ + " user_id_int64 Int64,\n"
+ + " price_float32 Float32,\n"
+ + " price_float64 Float64,\n"
+ + " user_date Date,\n"
+ + " user_timestamp DateTime,\n"
+ + " decimal_column Decimal(3,1),\n"
+ + " decimal32_column Decimal32(4),\n"
+ + " decimal64_column Decimal64(4),\n"
+ + " bool_flag Bool,\n"
+ + " message String\n"
+ + ")\n"
+ + "ENGINE = MergeTree\n"
+ + "PRIMARY KEY (user_id, user_timestamp)");
+ statement.execute(
+ "insert into "
+ + INPUT_TABLE
+ + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
+ statement.execute(
+ "insert into "
+ + INPUT_TABLE
+ + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
+ }
+ }
+
+ @AfterAll
+ static void afterAll() throws Exception {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement statement = conn.createStatement()) {
+ statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
+ }
+ }
+
+ @BeforeEach
+ void before() throws Exception {
+ env = StreamExecutionEnvironment.getExecutionEnvironment();
+ tEnv = StreamTableEnvironment.create(env);
+ }
+
+ @Test
+ void testJdbcSource() throws Exception {
+ createFlinkTable();
+ Iterator collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
+ List result =
+ CollectionUtil.iteratorToList(collected).stream()
+ .map(Row::toString)
+ .sorted()
+ .collect(Collectors.toList());
+
+ List expected =
+ Stream.of(
+ "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
+ "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
+ .sorted()
+ .collect(Collectors.toList());
+ assertThat(result).isEqualTo(expected);
+
+ assert result.size() == 2;
+ }
+
+ @Test
+ void testProject() throws Exception {
+ createFlinkTable();
+ Iterator collected =
+ tEnv.executeSql(
+ "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
+ + INPUT_TABLE)
+ .collect();
+ List result =
+ CollectionUtil.iteratorToList(collected).stream()
+ .map(Row::toString)
+ .sorted()
+ .collect(Collectors.toList());
+
+ List expected =
+ Stream.of(
+ "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
+ "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
+ .sorted()
+ .collect(Collectors.toList());
+ assertThat(result).isEqualTo(expected);
+
+ assert result.size() == 2;
+ }
+
+ private void createFlinkTable() {
+ tEnv.executeSql(
+ "CREATE TABLE "
+ + INPUT_TABLE
+ + " ("
+ + "user_id TINYINT NOT NULL,"
+ + "user_id_uint8 SMALLINT NOT NULL,"
+ + "user_id_int16 SMALLINT NOT NULL,"
+ + "user_id_uint16 INTEGER NOT NULL,"
+ + "user_id_int32 INTEGER NOT NULL,"
+ + "user_id_uint32 BIGINT NOT NULL,"
+ + "user_id_int64 BIGINT NOT NULL,"
+ + "price_float32 FLOAT NOT NULL,"
+ + "price_float64 DOUBLE NOT NULL,"
+ + "user_date DATE NOT NULL,"
+ + "user_timestamp TIMESTAMP(2) NOT NULL,"
+ + "decimal_column DECIMAL(3,1) NOT NULL,"
+ + "decimal32_column DECIMAL(9,4) NOT NULL,"
+ + "decimal64_column DECIMAL(18,4) NOT NULL,"
+ + "bool_flag BOOLEAN NOT NULL,"
+ + "message VARCHAR NOT NULL"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrl()
+ + "',"
+ + " 'table-name'='"
+ + INPUT_TABLE
+ + "',"
+ + " 'username'='"
+ + getMetadata().getUsername()
+ + "',"
+ + " 'password'='"
+ + getMetadata().getPassword()
+ + "'"
+ + ")");
+ }
+}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
new file mode 100644
index 000000000..0d0de2e65
--- /dev/null
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
@@ -0,0 +1,300 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Timestamp;
+
+import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
+import static org.apache.flink.table.api.Expressions.row;
+
+/** The Table Sink ITCase for {@link ClickHouseDialect}. */
+class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
+
+ public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
+ public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
+ public static final String OUTPUT_TABLE4 = "REAL_TABLE";
+
+ @BeforeAll
+ static void beforeAll() throws ClassNotFoundException, SQLException {
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement stat = conn.createStatement()) {
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE1
+ + "(\n"
+ + " user_id Int8,\n"
+ + " user_id_int16 Int16,\n"
+ + " user_id_int32 Int32,\n"
+ + " user_id_int64 Int64,\n"
+ + " price_float32 Float32,\n"
+ + " price_float64 Float64,\n"
+ + " user_date Date,\n"
+ + " user_timestamp DateTime,\n"
+ + " decimal_column Decimal(3,1),\n"
+ + " decimal32_column Decimal32(4),\n"
+ + " decimal64_column Decimal64(4),\n"
+ + " bool_flag Bool,\n"
+ + " message String\n"
+ + ")\n"
+ + "ENGINE = MergeTree\n"
+ + "PRIMARY KEY (user_id, user_timestamp)");
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE3
+ + " (user_id Int8,"
+ + "message String,"
+ + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+ stat.execute(
+ "CREATE TABLE "
+ + OUTPUT_TABLE4
+ + " (user_id Int8,"
+ + "real_data Float32,"
+ + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
+ }
+ }
+
+ @AfterAll
+ static void afterAll() throws Exception {
+ TestValuesTableFactory.clearAllData();
+ Class.forName(CONTAINER.getDriverClassName());
+ try (Connection conn =
+ DriverManager.getConnection(
+ CONTAINER.getJdbcUrl(),
+ CONTAINER.getUsername(),
+ CONTAINER.getPassword());
+ Statement stat = conn.createStatement()) {
+ stat.execute("DROP TABLE " + OUTPUT_TABLE1);
+ stat.execute("DROP TABLE " + OUTPUT_TABLE3);
+ stat.execute("DROP TABLE " + OUTPUT_TABLE4);
+ }
+ }
+
+ @Test
+ void testAllDataTypes() throws Exception {
+ StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+ env.getConfig().enableObjectReuse();
+ StreamTableEnvironment tEnv =
+ StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
+
+ tEnv.createTemporaryView(
+ "myTable",
+ tEnv.fromValues(
+ DataTypes.ROW(
+ DataTypes.FIELD("user_id", DataTypes.TINYINT()),
+ DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
+ DataTypes.FIELD("user_id_int32", DataTypes.INT()),
+ DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
+ DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
+ DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
+ DataTypes.FIELD("user_date", DataTypes.DATE()),
+ DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
+ DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
+ DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
+ DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
+ DataTypes.FIELD("message", DataTypes.STRING())),
+ row(
+ -128,
+ -32768,
+ -2147483648,
+ -9223372036854775808L,
+ -3.4e+38f,
+ -1.7e+308d,
+ "2023-01-01",
+ Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
+ -99.9f,
+ -99999.9999d,
+ true,
+ "this is a test message"),
+ row(
+ 127,
+ 32767,
+ 2147483647,
+ 9223372036854775807L,
+ 3.4e+38f,
+ 1.7e+308d,
+ "2023-01-02",
+ Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
+ 99.9f,
+ 99999.9999d,
+ false,
+ "this is a test message")));
+
+ tEnv.executeSql(
+ "CREATE TABLE "
+ + OUTPUT_TABLE1
+ + " ("
+ + "user_id TINYINT NOT NULL,"
+ + "user_id_int16 SMALLINT NOT NULL,"
+ + "user_id_int32 INTEGER NOT NULL,"
+ + "user_id_int64 BIGINT NOT NULL,"
+ + "price_float32 FLOAT NOT NULL,"
+ + "price_float64 DOUBLE NOT NULL,"
+ + "user_date DATE NOT NULL,"
+ + "user_timestamp TIMESTAMP(6) NOT NULL,"
+ + "decimal_column DECIMAL(3,1) NOT NULL,"
+ + "decimal32_column DECIMAL(9,4) NOT NULL,"
+ + "bool_flag BOOLEAN NOT NULL,"
+ + "message VARCHAR NOT NULL"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrl()
+ + "',"
+ + " 'table-name'='"
+ + OUTPUT_TABLE1
+ + "',"
+ + " 'username'='"
+ + getMetadata().getUsername()
+ + "',"
+ + " 'password'='"
+ + getMetadata().getPassword()
+ + "'"
+ + ")");
+
+ tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
+
+ check(
+ new Row[] {
+ Row.of(
+ -128,
+ -32768,
+ -2147483648,
+ -9223372036854775808L,
+ -3.4e+38f,
+ -1.7e+308d,
+ "2023-01-01",
+ Timestamp.valueOf("2023-01-01 15:35:12")
+ .toInstant()
+ .toString()
+ .replace("Z", ""),
+ -99.9f,
+ -99999.9999d,
+ true,
+ "this is a test message"),
+ Row.of(
+ 127,
+ 32767,
+ 2147483647,
+ 9223372036854775807L,
+ 3.4e+38f,
+ 1.7e+308d,
+ "2023-01-02",
+ Timestamp.valueOf("2023-01-01 16:35:23")
+ .toInstant()
+ .toString()
+ .replace("Z", ""),
+ 99.9f,
+ 99999.9999d,
+ false,
+ "this is a test message")
+ },
+ getMetadata().getJdbcUrlWithCredentials(),
+ OUTPUT_TABLE1,
+ new String[] {
+ "user_id",
+ "user_id_int16",
+ "user_id_int32",
+ "user_id_int64",
+ "price_float32",
+ "price_float64",
+ "user_date",
+ "user_timestamp",
+ "decimal_column",
+ "decimal32_column",
+ "bool_flag",
+ "message"
+ });
+ }
+
+ @Test
+ void testStreamSink() throws Exception {
+ StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+ env.getConfig().enableObjectReuse();
+ StreamTableEnvironment tEnv =
+ StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
+
+ tEnv.executeSql(
+ "CREATE TABLE upsertSink ("
+ + " real_data float"
+ + ") WITH ("
+ + " 'connector'='jdbc',"
+ + " 'url'='"
+ + getMetadata().getJdbcUrlWithCredentials()
+ + "',"
+ + " 'table-name'='"
+ + OUTPUT_TABLE4
+ + "'"
+ + ")");
+
+ tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
+ check(
+ new Row[] {Row.of(1.1f)},
+ getMetadata().getJdbcUrlWithCredentials(),
+ "REAL_TABLE",
+ new String[] {"real_data"});
+ }
+
+ @Test
+ void testBatchSink() throws Exception {
+ TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
+ tEnv.executeSql(
+ "CREATE TABLE USER_RESULT("
+ + "user_id BIGINT,"
+ + "message VARCHAR"
+ + ") WITH ( "
+ + "'connector' = 'jdbc',"
+ + "'url'='"
+ + getMetadata().getJdbcUrlWithCredentials()
+ + "',"
+ + "'table-name' = '"
+ + OUTPUT_TABLE3
+ + "',"
+ + "'sink.buffer-flush.max-rows' = '10',"
+ + "'sink.buffer-flush.interval' = '300ms',"
+ + "'sink.max-retries' = '4'"
+ + ")");
+
+ TableResult tableResult =
+ tEnv.executeSql(
+ "INSERT INTO USER_RESULT\n"
+ + "SELECT user_id, message "
+ + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
+ + "(42, 'Kim'), (1, 'Bob')) "
+ + "AS UserCountTable(user_id, message)");
+ tableResult.await();
+
+ check(
+ new Row[] {
+ Row.of("Bob", 1),
+ Row.of("Tom", 22),
+ Row.of("Kim", 42),
+ Row.of("Kim", 42),
+ Row.of("Bob", 1)
+ },
+ getMetadata().getJdbcUrlWithCredentials(),
+ OUTPUT_TABLE3,
+ new String[] {"message", "user_id"});
+ }
+}
From 1a43dfe61813901c18361bf76c2acce075c5fe15 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 25 May 2023 01:12:10 +0800
Subject: [PATCH 18/24] [FLINK-32068] connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickHouseDialect.java | 87 -----
.../clickhouse/ClickHouseDialectFactory.java | 19 --
.../clickhouse/ClickHouseDatabase.java | 25 --
.../clickhouse/ClickHouseImages.java | 4 -
.../clickhouse/ClickHouseMetadata.java | 66 ----
.../clickhouse/ClickHouseDialectTypeTest.java | 41 ---
.../ClickHouseTableSourceITCase.java | 180 -----------
.../clickhouse/ClickhouseTableSinkITCase.java | 300 ------------------
8 files changed, 722 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
delete mode 100644 flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
deleted file mode 100644
index f4c359afe..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialect.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickHouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** clickhouse dialect */
-public class ClickHouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
- private static final int MAX_TIMESTAMP_PRECISION = 9;
- private static final int MIN_TIMESTAMP_PRECISION = 0;
-
- // Define MAX/MIN precision of DECIMAL type according to clickhouse docs:
- // https://clickhouse.com/docs/en/sql-reference/data-types/decimal
- private static final int MAX_DECIMAL_PRECISION = 76;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickHouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "ClickHouse";
- }
-
- @Override
- public Optional timestampPrecisionRange() {
- return Optional.of(Range.of(MIN_TIMESTAMP_PRECISION, MAX_TIMESTAMP_PRECISION));
- }
-
- @Override
- public Optional decimalPrecisionRange() {
- return Optional.of(Range.of(MIN_DECIMAL_PRECISION, MAX_DECIMAL_PRECISION));
- }
-
- @Override
- public Set supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
deleted file mode 100644
index fcb27a8e6..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** clickhouse dialect factory */
-@Internal
-public class ClickHouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickHouseDialect();
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
deleted file mode 100644
index 0910fa0b9..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseDatabase.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-import org.apache.flink.connector.jdbc.databases.DatabaseTest;
-
-import org.testcontainers.containers.ClickHouseContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.utility.DockerImageName;
-
-/** clickhouse database for testing. */
-@Testcontainers
-public interface ClickHouseDatabase extends DatabaseTest, ClickHouseImages {
-
- @Container
- ClickHouseContainer CONTAINER =
- new ClickHouseContainer(
- DockerImageName.parse("clickhouse/clickhouse-server:23.4.2")
- .asCompatibleSubstituteFor("yandex/clickhouse-server"));
-
- @Override
- default DatabaseMetadata getMetadata() {
- return new ClickHouseMetadata(CONTAINER);
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
deleted file mode 100644
index 38ac99280..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseImages.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-/** clickhouse images */
-public interface ClickHouseImages {}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
deleted file mode 100644
index c5c83988a..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/ClickHouseMetadata.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.flink.connector.jdbc.databases.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.DatabaseMetadata;
-
-import org.testcontainers.containers.ClickHouseContainer;
-
-import javax.sql.XADataSource;
-
-/** clickhouse metadata */
-public class ClickHouseMetadata implements DatabaseMetadata {
-
- private final String username;
- private final String password;
- private final String url;
- private final String driver;
- private final String version;
- private final boolean xaEnabled;
-
- public ClickHouseMetadata(ClickHouseContainer container) {
- this(container, false);
- }
-
- public ClickHouseMetadata(ClickHouseContainer container, boolean hasXaEnabled) {
- this.username = container.getUsername();
- this.password = container.getPassword();
- this.url = container.getJdbcUrl();
- this.driver = container.getDriverClassName();
- this.version = container.getDockerImageName();
- this.xaEnabled = hasXaEnabled;
- }
-
- @Override
- public String getJdbcUrl() {
- return this.url;
- }
-
- @Override
- public String getJdbcUrlWithCredentials() {
- return String.format("%s?user=%s&password=%s", getJdbcUrl(), getUsername(), getPassword());
- }
-
- @Override
- public String getUsername() {
- return this.username;
- }
-
- @Override
- public String getPassword() {
- return this.password;
- }
-
- @Override
- public XADataSource buildXaDataSource() {
- return null;
- }
-
- @Override
- public String getDriverClass() {
- return this.driver;
- }
-
- @Override
- public String getVersion() {
- return this.version;
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
deleted file mode 100644
index 7bcf6d267..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseDialectTypeTest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest;
-
-import java.util.Arrays;
-import java.util.List;
-
-/** The Clickhouse params for {@link JdbcDialectTypeTest}. */
-public class ClickHouseDialectTypeTest extends JdbcDialectTypeTest {
-
- @Override
- protected String testDialect() {
- return "clickhouse";
- }
-
- @Override
- protected List testData() {
- return Arrays.asList(
- createTestItem("CHAR"),
- createTestItem("VARCHAR"),
- createTestItem("BOOLEAN"),
- createTestItem("TINYINT"),
- createTestItem("SMALLINT"),
- createTestItem("INTEGER"),
- createTestItem("BIGINT"),
- createTestItem("FLOAT"),
- createTestItem("DOUBLE"),
- createTestItem("DECIMAL(10, 4)"),
- createTestItem("DECIMAL(38, 18)"),
- createTestItem("DATE"),
- createTestItem("TIMESTAMP(3)"),
- createTestItem("TIMESTAMP WITHOUT TIME ZONE"),
- createTestItem("VARBINARY", "The ClickHouse dialect doesn't support type: BYTES"),
-
- // Not valid data
- createTestItem("BINARY", "The ClickHouse dialect doesn't support type: BINARY(1)."),
- createTestItem(
- "VARBINARY(10)",
- "The ClickHouse dialect doesn't support type: VARBINARY(10)."));
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
deleted file mode 100644
index aea4b14b1..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickHouseTableSourceITCase.java
+++ /dev/null
@@ -1,180 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-import org.apache.flink.util.CollectionUtil;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Iterator;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** The Table Source ITCase for {@link ClickHouseDialect}. */
-class ClickHouseTableSourceITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- private static final String INPUT_TABLE = "clickhouse_test_table";
-
- private static StreamExecutionEnvironment env;
- private static TableEnvironment tEnv;
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.execute(
- "CREATE TABLE "
- + INPUT_TABLE
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_uint8 UInt8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_uint16 UInt16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_uint32 UInt32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (-128,0,-32768,0, -2147483648,0,-9223372036854775808,-3.4e+38, -1.7e+308,'2023-01-01','2023-01-01 15:35:03', -99.9,-99999.9999,-99999999999999.9999,true,'this is a test message')");
- statement.execute(
- "insert into "
- + INPUT_TABLE
- + " values (127,255,32767,65535,2147483647,4294967295,9223372036854775807,3.4e+38,1.7e+308,'2023-01-02','2023-01-01 16:35:05', 99.9, 99999.9999,99999999999999.9999,false,'this is a test message')");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement statement = conn.createStatement()) {
- statement.executeUpdate("DROP TABLE " + INPUT_TABLE);
- }
- }
-
- @BeforeEach
- void before() throws Exception {
- env = StreamExecutionEnvironment.getExecutionEnvironment();
- tEnv = StreamTableEnvironment.create(env);
- }
-
- @Test
- void testJdbcSource() throws Exception {
- createFlinkTable();
- Iterator collected = tEnv.executeSql("SELECT * FROM " + INPUT_TABLE).collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -3.4E38, -1.7E308, 2023-01-01, 2023-01-01T15:35:03, -99.9, -99999.9999, -99999999999999.9999, true, this is a test message]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 3.4E38, 1.7E308, 2023-01-02, 2023-01-01T16:35:05, 99.9, 99999.9999, 99999999999999.9999, false, this is a test message]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- @Test
- void testProject() throws Exception {
- createFlinkTable();
- Iterator collected =
- tEnv.executeSql(
- "SELECT user_id,user_id_uint8,user_id_int16,user_id_uint16,user_id_int32,user_id_uint32,user_id_int64,decimal_column,decimal32_column,decimal64_column,bool_flag FROM "
- + INPUT_TABLE)
- .collect();
- List result =
- CollectionUtil.iteratorToList(collected).stream()
- .map(Row::toString)
- .sorted()
- .collect(Collectors.toList());
-
- List expected =
- Stream.of(
- "+I[-128, 0, -32768, 0, -2147483648, 0, -9223372036854775808, -99.9, -99999.9999, -99999999999999.9999, true]",
- "+I[127, 255, 32767, 65535, 2147483647, 4294967295, 9223372036854775807, 99.9, 99999.9999, 99999999999999.9999, false]")
- .sorted()
- .collect(Collectors.toList());
- assertThat(result).isEqualTo(expected);
-
- assert result.size() == 2;
- }
-
- private void createFlinkTable() {
- tEnv.executeSql(
- "CREATE TABLE "
- + INPUT_TABLE
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_uint8 SMALLINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_uint16 INTEGER NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_uint32 BIGINT NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(2) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "decimal64_column DECIMAL(18,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + INPUT_TABLE
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
- }
-}
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
deleted file mode 100644
index 0d0de2e65..000000000
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseTableSinkITCase.java
+++ /dev/null
@@ -1,300 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.databases.clickhouse.ClickHouseDatabase;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.table.api.DataTypes;
-import org.apache.flink.table.api.EnvironmentSettings;
-import org.apache.flink.table.api.TableEnvironment;
-import org.apache.flink.table.api.TableResult;
-import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
-import org.apache.flink.table.planner.factories.TestValuesTableFactory;
-import org.apache.flink.test.util.AbstractTestBase;
-import org.apache.flink.types.Row;
-
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Timestamp;
-
-import static org.apache.flink.connector.jdbc.internal.JdbcTableOutputFormatTest.check;
-import static org.apache.flink.table.api.Expressions.row;
-
-/** The Table Sink ITCase for {@link ClickHouseDialect}. */
-class ClickhouseTableSinkITCase extends AbstractTestBase implements ClickHouseDatabase {
-
- public static final String OUTPUT_TABLE1 = "dynamicSinkForInsert";
- public static final String OUTPUT_TABLE3 = "dynamicSinkForBatch";
- public static final String OUTPUT_TABLE4 = "REAL_TABLE";
-
- @BeforeAll
- static void beforeAll() throws ClassNotFoundException, SQLException {
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + "(\n"
- + " user_id Int8,\n"
- + " user_id_int16 Int16,\n"
- + " user_id_int32 Int32,\n"
- + " user_id_int64 Int64,\n"
- + " price_float32 Float32,\n"
- + " price_float64 Float64,\n"
- + " user_date Date,\n"
- + " user_timestamp DateTime,\n"
- + " decimal_column Decimal(3,1),\n"
- + " decimal32_column Decimal32(4),\n"
- + " decimal64_column Decimal64(4),\n"
- + " bool_flag Bool,\n"
- + " message String\n"
- + ")\n"
- + "ENGINE = MergeTree\n"
- + "PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE3
- + " (user_id Int8,"
- + "message String,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- stat.execute(
- "CREATE TABLE "
- + OUTPUT_TABLE4
- + " (user_id Int8,"
- + "real_data Float32,"
- + "user_timestamp DateTime) ENGINE = MergeTree PRIMARY KEY (user_id, user_timestamp)");
- }
- }
-
- @AfterAll
- static void afterAll() throws Exception {
- TestValuesTableFactory.clearAllData();
- Class.forName(CONTAINER.getDriverClassName());
- try (Connection conn =
- DriverManager.getConnection(
- CONTAINER.getJdbcUrl(),
- CONTAINER.getUsername(),
- CONTAINER.getPassword());
- Statement stat = conn.createStatement()) {
- stat.execute("DROP TABLE " + OUTPUT_TABLE1);
- stat.execute("DROP TABLE " + OUTPUT_TABLE3);
- stat.execute("DROP TABLE " + OUTPUT_TABLE4);
- }
- }
-
- @Test
- void testAllDataTypes() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inBatchMode());
-
- tEnv.createTemporaryView(
- "myTable",
- tEnv.fromValues(
- DataTypes.ROW(
- DataTypes.FIELD("user_id", DataTypes.TINYINT()),
- DataTypes.FIELD("user_id_int16", DataTypes.SMALLINT()),
- DataTypes.FIELD("user_id_int32", DataTypes.INT()),
- DataTypes.FIELD("user_id_int64", DataTypes.BIGINT()),
- DataTypes.FIELD("price_float32", DataTypes.FLOAT()),
- DataTypes.FIELD("price_float64", DataTypes.DOUBLE()),
- DataTypes.FIELD("user_date", DataTypes.DATE()),
- DataTypes.FIELD("user_timestamp", DataTypes.TIMESTAMP(3)),
- DataTypes.FIELD("decimal_column", DataTypes.DECIMAL(3, 1)),
- DataTypes.FIELD("decimal32_column", DataTypes.DECIMAL(9, 4)),
- DataTypes.FIELD("bool_flag", DataTypes.BOOLEAN()),
- DataTypes.FIELD("message", DataTypes.STRING())),
- row(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12").toInstant(),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- row(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23").toInstant(),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")));
-
- tEnv.executeSql(
- "CREATE TABLE "
- + OUTPUT_TABLE1
- + " ("
- + "user_id TINYINT NOT NULL,"
- + "user_id_int16 SMALLINT NOT NULL,"
- + "user_id_int32 INTEGER NOT NULL,"
- + "user_id_int64 BIGINT NOT NULL,"
- + "price_float32 FLOAT NOT NULL,"
- + "price_float64 DOUBLE NOT NULL,"
- + "user_date DATE NOT NULL,"
- + "user_timestamp TIMESTAMP(6) NOT NULL,"
- + "decimal_column DECIMAL(3,1) NOT NULL,"
- + "decimal32_column DECIMAL(9,4) NOT NULL,"
- + "bool_flag BOOLEAN NOT NULL,"
- + "message VARCHAR NOT NULL"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrl()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE1
- + "',"
- + " 'username'='"
- + getMetadata().getUsername()
- + "',"
- + " 'password'='"
- + getMetadata().getPassword()
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO " + OUTPUT_TABLE1 + " select * from myTable").await();
-
- check(
- new Row[] {
- Row.of(
- -128,
- -32768,
- -2147483648,
- -9223372036854775808L,
- -3.4e+38f,
- -1.7e+308d,
- "2023-01-01",
- Timestamp.valueOf("2023-01-01 15:35:12")
- .toInstant()
- .toString()
- .replace("Z", ""),
- -99.9f,
- -99999.9999d,
- true,
- "this is a test message"),
- Row.of(
- 127,
- 32767,
- 2147483647,
- 9223372036854775807L,
- 3.4e+38f,
- 1.7e+308d,
- "2023-01-02",
- Timestamp.valueOf("2023-01-01 16:35:23")
- .toInstant()
- .toString()
- .replace("Z", ""),
- 99.9f,
- 99999.9999d,
- false,
- "this is a test message")
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE1,
- new String[] {
- "user_id",
- "user_id_int16",
- "user_id_int32",
- "user_id_int64",
- "price_float32",
- "price_float64",
- "user_date",
- "user_timestamp",
- "decimal_column",
- "decimal32_column",
- "bool_flag",
- "message"
- });
- }
-
- @Test
- void testStreamSink() throws Exception {
- StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
- env.getConfig().enableObjectReuse();
- StreamTableEnvironment tEnv =
- StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode());
-
- tEnv.executeSql(
- "CREATE TABLE upsertSink ("
- + " real_data float"
- + ") WITH ("
- + " 'connector'='jdbc',"
- + " 'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + " 'table-name'='"
- + OUTPUT_TABLE4
- + "'"
- + ")");
-
- tEnv.executeSql("INSERT INTO upsertSink SELECT CAST(1.1 as FLOAT)").await();
- check(
- new Row[] {Row.of(1.1f)},
- getMetadata().getJdbcUrlWithCredentials(),
- "REAL_TABLE",
- new String[] {"real_data"});
- }
-
- @Test
- void testBatchSink() throws Exception {
- TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
- tEnv.executeSql(
- "CREATE TABLE USER_RESULT("
- + "user_id BIGINT,"
- + "message VARCHAR"
- + ") WITH ( "
- + "'connector' = 'jdbc',"
- + "'url'='"
- + getMetadata().getJdbcUrlWithCredentials()
- + "',"
- + "'table-name' = '"
- + OUTPUT_TABLE3
- + "',"
- + "'sink.buffer-flush.max-rows' = '10',"
- + "'sink.buffer-flush.interval' = '300ms',"
- + "'sink.max-retries' = '4'"
- + ")");
-
- TableResult tableResult =
- tEnv.executeSql(
- "INSERT INTO USER_RESULT\n"
- + "SELECT user_id, message "
- + "FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), "
- + "(42, 'Kim'), (1, 'Bob')) "
- + "AS UserCountTable(user_id, message)");
- tableResult.await();
-
- check(
- new Row[] {
- Row.of("Bob", 1),
- Row.of("Tom", 22),
- Row.of("Kim", 42),
- Row.of("Kim", 42),
- Row.of("Bob", 1)
- },
- getMetadata().getJdbcUrlWithCredentials(),
- OUTPUT_TABLE3,
- new String[] {"message", "user_id"});
- }
-}
From afce80297022ad3c53bb48df319520eca2edb051 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Thu, 25 May 2023 21:40:26 +0800
Subject: [PATCH 19/24] [FLINK-32068][jdbc] Support ClickHouse dialect
---
.../converter/ClickHouseRowConvert.java | 85 -------------------
1 file changed, 85 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
deleted file mode 100644
index a4ea4b2f1..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickHouseRowConvert.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.data.DecimalData;
-import org.apache.flink.table.data.StringData;
-import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.types.logical.DecimalType;
-import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.logical.RowType;
-
-import com.clickhouse.data.value.UnsignedByte;
-import com.clickhouse.data.value.UnsignedInteger;
-import com.clickhouse.data.value.UnsignedShort;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-
-/**
- * Runtime converter that responsible to convert between JDBC object and Flink internal object for
- * ClickHouse.
- */
-public class ClickHouseRowConvert extends AbstractJdbcRowConverter {
- @Override
- public String converterName() {
- return "ClickHouse";
- }
-
- public ClickHouseRowConvert(RowType rowType) {
- super(rowType);
- }
-
- @Override
- protected JdbcDeserializationConverter createInternalConverter(LogicalType type) {
- switch (type.getTypeRoot()) {
- case NULL:
- return null;
- case BOOLEAN:
- case FLOAT:
- case DOUBLE:
- return val -> val;
- case TINYINT:
- return val -> ((Byte) val).byteValue();
- case SMALLINT:
- return val ->
- val instanceof UnsignedByte
- ? ((UnsignedByte) val).shortValue()
- : ((Short) val).shortValue();
- case INTEGER:
- return val ->
- val instanceof UnsignedShort
- ? ((UnsignedShort) val).intValue()
- : ((Integer) val).intValue();
- case BIGINT:
- return jdbcField -> {
- if (jdbcField instanceof UnsignedInteger) {
- return ((UnsignedInteger) jdbcField).longValue();
- } else if (jdbcField instanceof Long) {
- return ((Long) jdbcField).longValue();
- }
- // UINT64 is not supported,the uint64 range exceeds the long range
- throw new UnsupportedOperationException("Unsupported type:" + type);
- };
- case DECIMAL:
- final int precision = ((DecimalType) type).getPrecision();
- final int scale = ((DecimalType) type).getScale();
- return val ->
- val instanceof BigInteger
- ? DecimalData.fromBigDecimal(
- new BigDecimal((BigInteger) val, 0), precision, scale)
- : DecimalData.fromBigDecimal((BigDecimal) val, precision, scale);
- case CHAR:
- case VARCHAR:
- return val -> StringData.fromString((String) val);
- case DATE:
- return val -> Long.valueOf(((LocalDate) val).toEpochDay()).intValue();
- case TIMESTAMP_WITHOUT_TIME_ZONE:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
- default:
- return super.createInternalConverter(type);
- }
- }
-}
From ef919235ff7583e17c9afc2e4e45ffa45c335c44 Mon Sep 17 00:00:00 2001
From: leishuiyu
Date: Sat, 13 May 2023 18:04:12 +0800
Subject: [PATCH 20/24] [FLINK-32068][jdbc] Support ClickHouse dialect
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 +++++++++++++++++++
.../clickhouse/ClickhouseDialectFactory.java | 19 +++++
.../converter/ClickhouseRowConvert.java | 19 +++++
3 files changed, 116 insertions(+)
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
create mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
new file mode 100644
index 000000000..0222e0337
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
@@ -0,0 +1,78 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
+import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
+import org.apache.flink.table.types.logical.LogicalTypeRoot;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.EnumSet;
+import java.util.Optional;
+import java.util.Set;
+
+/** */
+public class ClickhouseDialect extends AbstractDialect {
+
+ private static final long serialVersionUID = 1L;
+
+ // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
+ private static final int MAX_TIMESTAMP_PRECISION = 6;
+ private static final int MIN_TIMESTAMP_PRECISION = 1;
+
+ // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
+ // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
+ private static final int MAX_DECIMAL_PRECISION = 65;
+ private static final int MIN_DECIMAL_PRECISION = 1;
+
+ @Override
+ public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
+ return new ClickhouseRowConvert(rowType);
+ }
+
+ @Override
+ public String getLimitClause(long limit) {
+ return "LIMIT " + limit;
+ }
+
+ @Override
+ public Optional defaultDriverName() {
+ return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
+ }
+
+ @Override
+ public String quoteIdentifier(String identifier) {
+ return "`" + identifier + "`";
+ }
+
+ @Override
+ public Optional getUpsertStatement(
+ String tableName, String[] fieldNames, String[] uniqueKeyFields) {
+ return Optional.empty();
+ }
+
+ @Override
+ public String dialectName() {
+ return "Clickhouse";
+ }
+
+ @Override
+ public Set supportedTypes() {
+ // LegacyTypeInfoDataTypeConverter.
+ return EnumSet.of(
+ LogicalTypeRoot.CHAR,
+ LogicalTypeRoot.VARCHAR,
+ LogicalTypeRoot.BOOLEAN,
+ LogicalTypeRoot.VARBINARY,
+ LogicalTypeRoot.DECIMAL,
+ LogicalTypeRoot.TINYINT,
+ LogicalTypeRoot.SMALLINT,
+ LogicalTypeRoot.INTEGER,
+ LogicalTypeRoot.BIGINT,
+ LogicalTypeRoot.FLOAT,
+ LogicalTypeRoot.DOUBLE,
+ LogicalTypeRoot.DATE,
+ LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
+ LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
new file mode 100644
index 000000000..4924e4877
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.dialect.clickhouse;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
+import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
+
+/** */
+@Internal
+public class ClickhouseDialectFactory implements JdbcDialectFactory {
+ @Override
+ public boolean acceptsURL(String url) {
+ return url.startsWith("jdbc:clickhouse:");
+ }
+
+ @Override
+ public JdbcDialect create() {
+ return new ClickhouseDialect();
+ }
+}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
new file mode 100644
index 000000000..f696111b7
--- /dev/null
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
@@ -0,0 +1,19 @@
+package org.apache.flink.connector.jdbc.internal.converter;
+
+import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
+import org.apache.flink.table.types.logical.RowType;
+
+/** */
+public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public String converterName() {
+ return "Clickhouse";
+ }
+
+ public ClickhouseRowConvert(RowType rowType) {
+ super(rowType);
+ }
+}
From 4d0aa96688a766ef3ae0ecc6ed2e15b85438fd83 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 00:02:25 +0800
Subject: [PATCH 21/24] [FLINK-32068]connector jdbc support clickhouse
---
.../dialect/clickhouse/ClickhouseDialect.java | 78 -------------------
.../clickhouse/ClickhouseDialectFactory.java | 19 -----
2 files changed, 97 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
deleted file mode 100644
index 0222e0337..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialect.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.connector.jdbc.dialect.AbstractDialect;
-import org.apache.flink.connector.jdbc.internal.converter.ClickhouseRowConvert;
-import org.apache.flink.table.types.logical.LogicalTypeRoot;
-import org.apache.flink.table.types.logical.RowType;
-
-import java.util.EnumSet;
-import java.util.Optional;
-import java.util.Set;
-
-/** */
-public class ClickhouseDialect extends AbstractDialect {
-
- private static final long serialVersionUID = 1L;
-
- // Define MAX/MIN precision of TIMESTAMP type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fractional-seconds.html
- private static final int MAX_TIMESTAMP_PRECISION = 6;
- private static final int MIN_TIMESTAMP_PRECISION = 1;
-
- // Define MAX/MIN precision of DECIMAL type according to Mysql docs:
- // https://dev.mysql.com/doc/refman/8.0/en/fixed-point-types.html
- private static final int MAX_DECIMAL_PRECISION = 65;
- private static final int MIN_DECIMAL_PRECISION = 1;
-
- @Override
- public AbstractJdbcRowConverter getRowConverter(RowType rowType) {
- return new ClickhouseRowConvert(rowType);
- }
-
- @Override
- public String getLimitClause(long limit) {
- return "LIMIT " + limit;
- }
-
- @Override
- public Optional<String> defaultDriverName() {
- return Optional.of("com.clickhouse.jdbc.ClickHouseDriver");
- }
-
- @Override
- public String quoteIdentifier(String identifier) {
- return "`" + identifier + "`";
- }
-
- @Override
- public Optional<String> getUpsertStatement(
- String tableName, String[] fieldNames, String[] uniqueKeyFields) {
- return Optional.empty();
- }
-
- @Override
- public String dialectName() {
- return "Clickhouse";
- }
-
- @Override
- public Set<LogicalTypeRoot> supportedTypes() {
- // LegacyTypeInfoDataTypeConverter.
- return EnumSet.of(
- LogicalTypeRoot.CHAR,
- LogicalTypeRoot.VARCHAR,
- LogicalTypeRoot.BOOLEAN,
- LogicalTypeRoot.VARBINARY,
- LogicalTypeRoot.DECIMAL,
- LogicalTypeRoot.TINYINT,
- LogicalTypeRoot.SMALLINT,
- LogicalTypeRoot.INTEGER,
- LogicalTypeRoot.BIGINT,
- LogicalTypeRoot.FLOAT,
- LogicalTypeRoot.DOUBLE,
- LogicalTypeRoot.DATE,
- LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE,
- LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE);
- }
-}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
deleted file mode 100644
index 4924e4877..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/dialect/clickhouse/ClickhouseDialectFactory.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.dialect.clickhouse;
-
-import org.apache.flink.annotation.Internal;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
-import org.apache.flink.connector.jdbc.dialect.JdbcDialectFactory;
-
-/** */
-@Internal
-public class ClickhouseDialectFactory implements JdbcDialectFactory {
- @Override
- public boolean acceptsURL(String url) {
- return url.startsWith("jdbc:clickhouse:");
- }
-
- @Override
- public JdbcDialect create() {
- return new ClickhouseDialect();
- }
-}
From b597ed3cb3a982313b5516dc33bdccd6e17738b7 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Tue, 6 Jun 2023 22:30:15 +0800
Subject: [PATCH 22/24] [FLINK-32068] connector jdbc support clickhouse
---
.../converter/ClickhouseRowConvert.java | 19 -------------------
1 file changed, 19 deletions(-)
delete mode 100644 flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
deleted file mode 100644
index f696111b7..000000000
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/internal/converter/ClickhouseRowConvert.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.flink.connector.jdbc.internal.converter;
-
-import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
-import org.apache.flink.table.types.logical.RowType;
-
-/** */
-public class ClickhouseRowConvert extends AbstractJdbcRowConverter {
-
- private static final long serialVersionUID = 1L;
-
- @Override
- public String converterName() {
- return "Clickhouse";
- }
-
- public ClickhouseRowConvert(RowType rowType) {
- super(rowType);
- }
-}
From 6ce9bf989c39d1a7bbc576827e60726f16cbc5ba Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Wed, 7 Jun 2023 20:59:35 +0800
Subject: [PATCH 23/24] [FLINK-32068] connector jdbc support clickhouse
,support map types
---
.../clickhouse/dialect/ClickHouseDialect.java | 1 +
.../clickhouse/dialect/ClickHouseRowConvert.java | 14 ++++++++++++++
.../flink/connector/jdbc/utils/JdbcTypeUtil.java | 2 ++
.../table/ClickHouseTableSourceITCase.java | 15 ++++++++++++---
4 files changed, 29 insertions(+), 3 deletions(-)
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
index 620e89352..e27296d31 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseDialect.java
@@ -80,6 +80,7 @@ public Set<LogicalTypeRoot> supportedTypes() {
LogicalTypeRoot.FLOAT,
LogicalTypeRoot.DOUBLE,
LogicalTypeRoot.DATE,
+ LogicalTypeRoot.MAP,
LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE,
LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE);
}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
index 1fd1d8e58..f8aa16b26 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/databases/clickhouse/dialect/ClickHouseRowConvert.java
@@ -2,6 +2,7 @@
import org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter;
import org.apache.flink.table.data.DecimalData;
+import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.DecimalType;
@@ -16,6 +17,7 @@
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
+import java.util.Map;
/**
* Runtime converter that responsible to convert between JDBC object and Flink internal object for
@@ -79,8 +81,20 @@ protected JdbcDeserializationConverter createInternalConverter(LogicalType type)
case TIMESTAMP_WITHOUT_TIME_ZONE:
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
return val -> TimestampData.fromLocalDateTime((LocalDateTime) val);
+ case MAP:
+ return val -> new GenericMapData((Map<?, ?>) val);
default:
return super.createInternalConverter(type);
}
}
+
+ @Override
+ protected JdbcSerializationConverter createExternalConverter(LogicalType type) {
+ switch (type.getTypeRoot()) {
+ case MAP:
+ return (val, index, statement) -> statement.setObject(index, val);
+ default:
+ return super.createExternalConverter(type);
+ }
+ }
}
diff --git a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtil.java b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtil.java
index 4c4c434b8..d44a8dc82 100644
--- a/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtil.java
+++ b/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtil.java
@@ -51,6 +51,7 @@
import static org.apache.flink.table.types.logical.LogicalTypeRoot.DOUBLE;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.FLOAT;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.INTEGER;
+import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.SMALLINT;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE;
@@ -107,6 +108,7 @@ public class JdbcTypeUtil {
put(TIME_WITHOUT_TIME_ZONE, Types.TIME);
put(DECIMAL, Types.DECIMAL);
put(ARRAY, Types.ARRAY);
+ put(MAP, Types.JAVA_OBJECT);
}
});
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
index 0dd88ed07..ef5580abb 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/clickhouse/table/ClickHouseTableSourceITCase.java
@@ -10,6 +10,7 @@
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.List;
import java.util.TimeZone;
@@ -38,13 +39,19 @@ protected ClickhouseTableRow createInputTable() {
field("timestamp6_col", dbType("DateTime(6)"), DataTypes.TIMESTAMP(6)),
field("decimal_column", dbType("Decimal(3,1)"), DataTypes.DECIMAL(3, 1)),
field("bool_flag", dbType("Bool"), DataTypes.BOOLEAN()),
- field("message", dbType("String"), DataTypes.VARCHAR(100)));
+ field("message", dbType("String"), DataTypes.VARCHAR(100)),
+ field(
+ "test_map",
+ dbType("Map(Int64,Int64)"),
+ DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BIGINT())));
}
@Override
protected List<Row> getTestData() {
TimeZone timeZone = TimeZone.getTimeZone("GTM+0");
TimeZone.setDefault(timeZone);
+ HashMap<Long, Long> map = new HashMap<>();
+ map.put(1L, 2L);
return Arrays.asList(
Row.of(
1L,
@@ -59,7 +66,8 @@ protected List<Row> getTestData() {
LocalDateTime.parse("2020-01-01T15:35:00.123456"),
BigDecimal.valueOf(-99.9),
true,
- "this is a test message"),
+ "this is a test message",
+ map),
Row.of(
2L,
(byte) 2,
@@ -73,6 +81,7 @@ protected List getTestData() {
LocalDateTime.parse("2020-01-01T15:36:01.123456"),
BigDecimal.valueOf(99.9),
false,
- "this is a test message"));
+ "this is a test message",
+ map));
}
}
From 4facf4cc9cff8d1ad46fb5b06d1499bf56d55ca1 Mon Sep 17 00:00:00 2001
From: wending <1062698930@qq.com>
Date: Wed, 12 Jul 2023 23:24:19 +0800
Subject: [PATCH 24/24] [FLINK-32068] connector jdbc support clickhouse ,fix
test fail
---
.../org/apache/flink/connector/jdbc/utils/JdbcTypeUtilTest.java | 1 -
1 file changed, 1 deletion(-)
diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtilTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtilTest.java
index b4c672090..6536f9d01 100644
--- a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtilTest.java
+++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/utils/JdbcTypeUtilTest.java
@@ -35,7 +35,6 @@ class JdbcTypeUtilTest {
void testTypeConversions() {
assertThat(logicalTypeToSqlType(LogicalTypeRoot.INTEGER)).isEqualTo(Types.INTEGER);
testUnsupportedType(LogicalTypeRoot.RAW);
- testUnsupportedType(LogicalTypeRoot.MAP);
}
private static void testUnsupportedType(LogicalTypeRoot logicalTypeRoot) {