[FLINK-36009] Architecture tests fixes #140

Open · wants to merge 3 commits into base: main
Changes from all commits
@@ -1,5 +1,5 @@
#
#Tue May 28 16:21:17 CEST 2024
#Wed Aug 28 22:23:37 CEST 2024
Return\ and\ argument\ types\ of\ methods\ annotated\ with\ @Public\ must\ be\ annotated\ with\ @Public.=596bd7eb-1ed5-4963-a56a-941766d61233
Connector\ production\ code\ must\ not\ depend\ on\ non-public\ API\ outside\ of\ connector\ packages=f242ee43-a122-4d2f-a291-6439cdb9029f
ITCASE\ tests\ should\ use\ a\ MiniCluster\ resource\ or\ extension=6b9ab1b0-c14d-4667-bab5-407b81fba98b
@@ -8,3 +8,4 @@ Options\ for\ connectors\ and\ formats\ should\ reside\ in\ a\ consistent\ packa
Tests\ inheriting\ from\ AbstractTestBase\ should\ have\ name\ ending\ with\ ITCase=0f94f11b-9bc8-445a-8be8-e7dfd8a384b0
Return\ and\ argument\ types\ of\ methods\ annotated\ with\ @PublicEvolving\ must\ be\ annotated\ with\ @Public(Evolving).=d45c3af5-52c6-45fd-9926-75e75e77473a
Classes\ in\ API\ packages\ should\ have\ at\ least\ one\ API\ visibility\ annotation.=0c7b60d6-a12c-432d-8a72-747cabaeb7ed
Connector\ production\ code\ must\ depend\ only\ on\ public\ API\ when\ outside\ of\ connector\ packages=f2faddb7-ac11-4947-ac31-59d16e454201
117 changes: 117 additions & 0 deletions flink-connector-jdbc-architecture/pom.xml
@@ -0,0 +1,117 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-parent</artifactId>
<version>3.3-SNAPSHOT</version>
</parent>

<artifactId>flink-connector-jdbc-architecture</artifactId>
<name>Flink : Connectors : JDBC : Architecture</name>

<packaging>jar</packaging>

<dependencies>
<!-- Flink ArchUnit -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-architecture-tests-base</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-architecture-tests-test</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-architecture-tests-production</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>

<!-- Flink Jdbc Modules To Test -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-core</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-cratedb</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-db2</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-mysql</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-oceanbase</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-oracle</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-postgres</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-sqlserver</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc-trino</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>

</project>
@@ -16,8 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.architecture;
package org.apache.flink.connector.jdbc.architecture;

import org.apache.flink.architecture.ProductionCodeArchitectureBase;
import org.apache.flink.architecture.common.ImportOptions;

import com.tngtech.archunit.core.importer.ImportOption;
@@ -16,8 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.architecture;
package org.apache.flink.connector.jdbc.architecture;

import org.apache.flink.architecture.TestCodeArchitectureTestBase;
import org.apache.flink.architecture.common.ImportOptions;

import com.tngtech.archunit.core.importer.ImportOption;
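Note: the bodies of the two relocated test classes are collapsed in this diff. For orientation, a Flink ArchUnit test class in this layout typically looks roughly like the sketch below; the exact import options and rule set here are assumptions, not part of this PR.

package org.apache.flink.connector.jdbc.architecture;

import org.apache.flink.architecture.ProductionCodeArchitectureBase;
import org.apache.flink.architecture.common.ImportOptions;

import com.tngtech.archunit.core.importer.ImportOption;
import com.tngtech.archunit.junit.AnalyzeClasses;
import com.tngtech.archunit.junit.ArchTest;
import com.tngtech.archunit.junit.ArchTests;

/** Sketch: runs the shared production-code rules against the connector packages. */
@AnalyzeClasses(
        packages = "org.apache.flink.connector.jdbc",
        importOptions = {
            ImportOption.DoNotIncludeTests.class,
            ImportOptions.ExcludeScalaImportOption.class,
            ImportOptions.ExcludeShadedImportOption.class
        })
public class ProductionCodeArchitectureTest {

    @ArchTest
    public static final ArchTests COMMON_TESTS =
            ArchTests.in(ProductionCodeArchitectureBase.class);
}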
7 changes: 0 additions & 7 deletions flink-connector-jdbc-core/pom.xml
@@ -151,13 +151,6 @@ under the License.
<scope>test</scope>
</dependency>

<!-- ArchUit test dependencies -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-architecture-tests-test</artifactId>
<scope>test</scope>
</dependency>

<!-- TestContainer dependencies -->
<dependency>
<groupId>org.testcontainers</groupId>
@@ -18,7 +18,7 @@
package org.apache.flink.connector.jdbc;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.util.Preconditions;
import org.apache.flink.connector.jdbc.core.util.Precondition;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@@ -47,14 +47,14 @@ protected JdbcConnectionOptions(
@Nullable String driverName,
int connectionCheckTimeoutSeconds,
@Nonnull Properties properties) {
Preconditions.checkArgument(
Precondition.checkArgument(
connectionCheckTimeoutSeconds > 0,
"Connection check timeout seconds shouldn't be smaller than 1");
this.url = Preconditions.checkNotNull(url, "jdbc url is empty");
this.url = Precondition.checkNotNull(url, "jdbc url is empty");
this.driverName = driverName;
this.connectionCheckTimeoutSeconds = connectionCheckTimeoutSeconds;
this.properties =
Preconditions.checkNotNull(properties, "Connection properties must be non-null");
Precondition.checkNotNull(properties, "Connection properties must be non-null");
}

public String getDbURL() {
@@ -113,8 +113,8 @@ public JdbcConnectionOptionsBuilder withDriverName(String driverName) {
}

public JdbcConnectionOptionsBuilder withProperty(String propKey, String propVal) {
Preconditions.checkNotNull(propKey, "Connection property key mustn't be null");
Preconditions.checkNotNull(propVal, "Connection property value mustn't be null");
Precondition.checkNotNull(propKey, "Connection property key mustn't be null");
Precondition.checkNotNull(propVal, "Connection property value mustn't be null");
this.properties.put(propKey, propVal);
return this;
}
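The new org.apache.flink.connector.jdbc.core.util.Precondition helper that replaces org.apache.flink.util.Preconditions throughout this PR is not shown in the rendered diff. Based on the call sites above, a minimal version would need roughly the surface sketched below; the method bodies and the @Internal annotation are assumptions.

package org.apache.flink.connector.jdbc.core.util;

import org.apache.flink.annotation.Internal;

import javax.annotation.Nullable;

/** Sketch of a connector-local replacement for Flink's Preconditions utility. */
@Internal
public final class Precondition {

    private Precondition() {}

    /** Fails with IllegalArgumentException when the condition does not hold. */
    public static void checkArgument(boolean condition) {
        if (!condition) {
            throw new IllegalArgumentException();
        }
    }

    public static void checkArgument(boolean condition, @Nullable Object errorMessage) {
        if (!condition) {
            throw new IllegalArgumentException(String.valueOf(errorMessage));
        }
    }

    public static void checkArgument(
            boolean condition, String errorMessageTemplate, Object... errorMessageArgs) {
        if (!condition) {
            throw new IllegalArgumentException(
                    String.format(errorMessageTemplate, errorMessageArgs));
        }
    }

    /** Returns the reference if non-null, otherwise fails with NullPointerException. */
    public static <T> T checkNotNull(@Nullable T reference) {
        if (reference == null) {
            throw new NullPointerException();
        }
        return reference;
    }

    public static <T> T checkNotNull(@Nullable T reference, @Nullable String errorMessage) {
        if (reference == null) {
            throw new NullPointerException(String.valueOf(errorMessage));
        }
        return reference;
    }

    /** Fails with IllegalStateException when the condition does not hold. */
    public static void checkState(boolean condition, @Nullable Object errorMessage) {
        if (!condition) {
            throw new IllegalStateException(String.valueOf(errorMessage));
        }
    }
}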
@@ -18,7 +18,7 @@
package org.apache.flink.connector.jdbc;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.util.Preconditions;
import org.apache.flink.connector.jdbc.core.util.Precondition;

import java.io.Serializable;
import java.util.Optional;
@@ -72,7 +72,7 @@ private JdbcExactlyOnceOptions(
this.allowOutOfOrderCommits = allowOutOfOrderCommits;
this.timeoutSec = timeoutSec.orElse(null);
this.transactionPerConnection = transactionPerConnection;
Preconditions.checkArgument(this.maxCommitAttempts > 0, "maxCommitAttempts should be > 0");
Precondition.checkArgument(this.maxCommitAttempts > 0, "maxCommitAttempts should be > 0");
}

public static JdbcExactlyOnceOptions defaults() {
@@ -18,7 +18,7 @@
package org.apache.flink.connector.jdbc;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.util.Preconditions;
import org.apache.flink.connector.jdbc.core.util.Precondition;

import java.io.Serializable;
import java.util.Objects;
@@ -35,7 +35,7 @@ public class JdbcExecutionOptions implements Serializable {
private final int maxRetries;

private JdbcExecutionOptions(long batchIntervalMs, int batchSize, int maxRetries) {
Preconditions.checkArgument(maxRetries >= 0);
Precondition.checkArgument(maxRetries >= 0);
this.batchIntervalMs = batchIntervalMs;
this.batchSize = batchSize;
this.maxRetries = maxRetries;
@@ -19,7 +19,6 @@
package org.apache.flink.connector.jdbc;

import org.apache.flink.annotation.Experimental;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.io.DefaultInputSplitAssigner;
import org.apache.flink.api.common.io.InputFormat;
import org.apache.flink.api.common.io.RichInputFormat;
@@ -29,14 +28,15 @@
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.jdbc.core.datastream.source.JdbcSource;
import org.apache.flink.connector.jdbc.core.datastream.source.JdbcSourceBuilder;
import org.apache.flink.connector.jdbc.core.util.Precondition;
import org.apache.flink.connector.jdbc.core.util.VisibleForTest;
import org.apache.flink.connector.jdbc.datasource.connections.JdbcConnectionProvider;
import org.apache.flink.connector.jdbc.datasource.connections.SimpleJdbcConnectionProvider;
import org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider;
import org.apache.flink.core.io.GenericInputSplit;
import org.apache.flink.core.io.InputSplit;
import org.apache.flink.core.io.InputSplitAssigner;
import org.apache.flink.types.Row;
import org.apache.flink.util.Preconditions;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -325,12 +325,12 @@ public InputSplitAssigner getInputSplitAssigner(InputSplit[] inputSplits) {
return new DefaultInputSplitAssigner(inputSplits);
}

@VisibleForTesting
@VisibleForTest
protected PreparedStatement getStatement() {
return statement;
}

@VisibleForTesting
@VisibleForTest
protected Connection getDbConn() {
return connectionProvider.getConnection();
}
@@ -404,7 +404,7 @@ public JdbcInputFormatBuilder setRowTypeInfo(RowTypeInfo rowTypeInfo) {
}

public JdbcInputFormatBuilder setFetchSize(int fetchSize) {
Preconditions.checkArgument(
Precondition.checkArgument(
fetchSize == Integer.MIN_VALUE || fetchSize > 0,
"Illegal value %s for fetchSize, has to be positive or Integer.MIN_VALUE.",
fetchSize);
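Similarly, the @VisibleForTest annotation that replaces Flink's @VisibleForTesting above is introduced in org.apache.flink.connector.jdbc.core.util but not shown in this diff; it is presumably a plain marker annotation along these lines (retention, targets, and the @Internal annotation are assumptions).

package org.apache.flink.connector.jdbc.core.util;

import org.apache.flink.annotation.Internal;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/** Sketch: marks members whose visibility is relaxed only so that tests can access them. */
@Documented
@Target({ElementType.TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.CONSTRUCTOR})
@Retention(RetentionPolicy.CLASS)
@Internal
public @interface VisibleForTest {}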
@@ -20,6 +20,7 @@

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.connector.jdbc.core.table.JdbcDynamicTableFactory;
import org.apache.flink.connector.jdbc.core.util.Precondition;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.AbstractCatalog;
@@ -50,7 +51,6 @@
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.factories.Factory;
import org.apache.flink.table.types.DataType;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.StringUtils;
import org.apache.flink.util.TemporaryClassLoaderContext;

@@ -83,9 +83,9 @@
import static org.apache.flink.connector.jdbc.core.table.JdbcConnectorOptions.URL;
import static org.apache.flink.connector.jdbc.core.table.JdbcConnectorOptions.USERNAME;
import static org.apache.flink.connector.jdbc.core.table.JdbcDynamicTableFactory.IDENTIFIER;
import static org.apache.flink.connector.jdbc.core.util.Precondition.checkArgument;
import static org.apache.flink.connector.jdbc.core.util.Precondition.checkNotNull;
import static org.apache.flink.table.factories.FactoryUtil.CONNECTOR;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;

/** Abstract catalog for any JDBC catalogs. */
@PublicEvolving
@@ -130,7 +130,7 @@ public AbstractJdbcCatalog(
this.userClassLoader = userClassLoader;
this.baseUrl = baseUrl.endsWith("/") ? baseUrl : baseUrl + "/";
this.defaultUrl = getDatabaseUrl(defaultDatabase);
this.connectionProperties = Preconditions.checkNotNull(connectionProperties);
this.connectionProperties = Precondition.checkNotNull(connectionProperties);
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(connectionProperties.getProperty(USER_KEY)));
checkArgument(
@@ -197,7 +197,7 @@ protected Optional<UniqueConstraint> getPrimaryKey(
String columnName = rs.getString("COLUMN_NAME");
pkName = rs.getString("PK_NAME"); // all the PK_NAME should be the same
int keySeq = rs.getInt("KEY_SEQ");
Preconditions.checkState(
Precondition.checkState(
!keySeqColumnName.containsKey(keySeq - 1),
"The field(s) of primary key must be from the same table.");
keySeqColumnName.put(keySeq - 1, columnName); // KEY_SEQ is 1-based index
@@ -251,7 +251,7 @@ public void alterDatabase(String name, CatalogDatabase newDatabase, boolean igno
public CatalogDatabase getDatabase(String databaseName)
throws DatabaseNotExistException, CatalogException {

Preconditions.checkState(
Precondition.checkState(
!StringUtils.isNullOrWhitespaceOnly(databaseName),
"Database name must not be blank.");
if (listDatabases().contains(databaseName)) {
@@ -19,13 +19,13 @@
package org.apache.flink.connector.jdbc.core.database.dialect;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.connector.jdbc.core.util.Precondition;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalTypeRoot;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.VarBinaryType;
import org.apache.flink.util.Preconditions;

import java.util.Arrays;
import java.util.Optional;
@@ -253,7 +253,7 @@ public static class Range {
private final int max;

public static Range of(int min, int max) {
Preconditions.checkArgument(
Precondition.checkArgument(
min <= max,
String.format(
"The range min value in range %d must be <= max value %d", min, max));
@@ -44,7 +44,7 @@
import java.time.LocalDateTime;
import java.time.LocalTime;

import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.connector.jdbc.core.util.Precondition.checkNotNull;

/** Base class for all converters that convert between JDBC object and Flink internal object. */
public abstract class AbstractDialectConverter implements JdbcDialectConverter {
@@ -34,7 +34,7 @@

import javax.sql.XADataSource;

import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.connector.jdbc.core.util.Precondition.checkNotNull;

/** Builder to construct {@link JdbcSink}. */
@PublicEvolving